repository_name
stringclasses
316 values
func_path_in_repository
stringlengths
6
223
func_name
stringlengths
1
134
language
stringclasses
1 value
func_code_string
stringlengths
57
65.5k
func_documentation_string
stringlengths
1
46.3k
split_name
stringclasses
1 value
func_code_url
stringlengths
91
315
called_functions
listlengths
1
156
enclosing_scope
stringlengths
2
1.48M
VisTrails/tej
tej/submission.py
RemoteQueue.get_client
python
def get_client(self): if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh
Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L220-L238
[ "def _connect(self):\n \"\"\"Connects via SSH.\n \"\"\"\n ssh = self._ssh_client()\n logger.debug(\"Connecting with %s\",\n ', '.join('%s=%r' % (k, v if k != \"password\" else \"***\")\n for k, v in iteritems(self.destination)))\n ssh.connect(**self.destination)\n logger.debug(\"Connected to %s\", self.destination['hostname'])\n self._ssh = ssh\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. """ server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. 
""" ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). """ if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server 
returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue._call
python
def _call(self, cmd, get_output): server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close()
Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L243-L277
[ "def shell_escape(s):\n r\"\"\"Given bl\"a, returns \"bl\\\\\"a\".\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if not s or any(c not in safe_shell_chars for c in s):\n return '\"%s\"' % (s.replace('\\\\', '\\\\\\\\')\n .replace('\"', '\\\\\"')\n .replace('`', '\\\\`')\n .replace('$', '\\\\$'))\n else:\n return s\n", "def server_logger(self):\n \"\"\"Handles messages from the server.\n\n By default, uses getLogger('tej.server').warning(). Override this in\n subclasses to provide your own mechanism.\n \"\"\"\n return ServerLogger()\n", "def get_client(self):\n \"\"\"Gets the SSH client.\n\n This will check that the connection is still alive first, and reconnect\n if necessary.\n \"\"\"\n if self._ssh is None:\n self._connect()\n return self._ssh\n else:\n try:\n chan = self._ssh.get_transport().open_session()\n except (socket.error, paramiko.SSHException):\n logger.warning(\"Lost connection, reconnecting...\")\n self._ssh.close()\n self._connect()\n else:\n chan.close()\n return self._ssh\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.check_call
python
def check_call(self, cmd): ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret)
Calls a command through SSH.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L279-L284
[ "def _call(self, cmd, get_output):\n \"\"\"Calls a command through the SSH connection.\n\n Remote stderr gets printed to this program's stderr. Output is captured\n and may be returned.\n \"\"\"\n server_err = self.server_logger()\n\n chan = self.get_client().get_transport().open_session()\n try:\n logger.debug(\"Invoking %r%s\",\n cmd, \" (stdout)\" if get_output else \"\")\n chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))\n output = b''\n while True:\n r, w, e = select.select([chan], [], [])\n if chan not in r:\n continue # pragma: no cover\n recvd = False\n while chan.recv_stderr_ready():\n data = chan.recv_stderr(1024)\n server_err.append(data)\n recvd = True\n while chan.recv_ready():\n data = chan.recv(1024)\n if get_output:\n output += data\n recvd = True\n if not recvd and chan.exit_status_ready():\n break\n output = output.rstrip(b'\\r\\n')\n return chan.recv_exit_status(), output\n finally:\n server_err.done()\n chan.close()\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.check_output
python
def check_output(self, cmd): ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output
Calls a command through SSH and returns its output.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L286-L293
[ "def _call(self, cmd, get_output):\n \"\"\"Calls a command through the SSH connection.\n\n Remote stderr gets printed to this program's stderr. Output is captured\n and may be returned.\n \"\"\"\n server_err = self.server_logger()\n\n chan = self.get_client().get_transport().open_session()\n try:\n logger.debug(\"Invoking %r%s\",\n cmd, \" (stdout)\" if get_output else \"\")\n chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))\n output = b''\n while True:\n r, w, e = select.select([chan], [], [])\n if chan not in r:\n continue # pragma: no cover\n recvd = False\n while chan.recv_stderr_ready():\n data = chan.recv_stderr(1024)\n server_err.append(data)\n recvd = True\n while chan.recv_ready():\n data = chan.recv(1024)\n if get_output:\n output += data\n recvd = True\n if not recvd and chan.exit_status_ready():\n break\n output = output.rstrip(b'\\r\\n')\n return chan.recv_exit_status(), output\n finally:\n server_err.done()\n chan.close()\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue._resolve_queue
python
def _resolve_queue(self, queue, depth=0, links=None): if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way")
Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location).
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L295-L353
[ "def escape_queue(s):\n \"\"\"Escapes the path to a queue, e.g. preserves ~ at the begining.\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if s.startswith('~/'):\n return '~/' + shell_escape(s[2:])\n else:\n return shell_escape(s)\n", "def check_output(self, cmd):\n \"\"\"Calls a command through SSH and returns its output.\n \"\"\"\n ret, output = self._call(cmd, True)\n if ret != 0: # pragma: no cover\n raise RemoteCommandFailure(command=cmd, ret=ret)\n logger.debug(\"Output: %r\", output)\n return output\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations. 
""" if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue._get_queue
python
def _get_queue(self): if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue
Gets the actual location of the queue, or None.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L355-L364
[ "def _resolve_queue(self, queue, depth=0, links=None):\n \"\"\"Finds the location of tej's queue directory on the server.\n\n The `queue` set when constructing this `RemoteQueue` might be relative\n to the home directory and might contain ``~user`` placeholders. Also,\n each queue may in fact be a link to another path (a file containing\n the string ``tejdir:``, a space, and a new pathname, relative to this\n link's location).\n \"\"\"\n if depth == 0:\n logger.debug(\"resolve_queue(%s)\", queue)\n answer = self.check_output(\n 'if [ -d %(queue)s ]; then '\n ' cd %(queue)s; echo \"dir\"; cat version; pwd; '\n 'elif [ -f %(queue)s ]; then '\n ' cat %(queue)s; '\n 'else '\n ' echo no; '\n 'fi' % {\n 'queue': escape_queue(queue)})\n if answer == b'no':\n if depth > 0:\n logger.debug(\"Broken link at depth=%d\", depth)\n else:\n logger.debug(\"Path doesn't exist\")\n return None, depth\n elif answer.startswith(b'dir\\n'):\n version, runtime, path = answer[4:].split(b'\\n', 2)\n try:\n version = tuple(int(e)\n for e in version.decode('ascii', 'ignore')\n .split('.'))\n except ValueError:\n version = 0, 0\n if version[:2] != self.PROTOCOL_VERSION:\n raise QueueExists(\n msg=\"Queue exists and is using incompatible protocol \"\n \"version %s\" % '.'.join('%s' % e for e in version))\n path = PosixPath(path)\n runtime = runtime.decode('ascii', 'replace')\n if self.need_runtime is not None:\n if (self.need_runtime is not None and\n runtime not in self.need_runtime):\n raise QueueExists(\n msg=\"Queue exists and is using explicitely disallowed \"\n \"runtime %s\" % runtime)\n logger.debug(\"Found directory at %s, depth=%d, runtime=%s\",\n path, depth, runtime)\n return path, depth\n elif answer.startswith(b'tejdir: '):\n new = queue.parent / answer[8:]\n logger.debug(\"Found link to %s, recursing\", new)\n if links is not None:\n links.append(queue)\n return self._resolve_queue(new, depth + 1)\n else: # pragma: no cover\n logger.debug(\"Server returned %r\", answer)\n raise 
RemoteCommandFailure(msg=\"Queue resolution command failed \"\n \"in unexpected way\")\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations. 
""" if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.setup
python
def setup(self, links=None, force=False, only_links=False):
    """Installs the runtime at the target location.

    This will not replace an existing installation, unless `force` is
    True. After installation, creates links to this installation at the
    specified locations.
    """
    links = links or []

    def write_link(target, link):
        # A link is a plain file containing "tejdir: <path>".
        self.check_call('echo "tejdir:" %(queue)s > %(link)s' % {
                'queue': escape_queue(target),
                'link': escape_queue(link)})

    if only_links:
        logger.info("Only creating links")
        for link in links:
            write_link(self.queue, link)
        return

    queue, depth = self._resolve_queue(self.queue)
    if queue is not None or depth > 0:
        # Something (queue, link, or broken link) is already there.
        if not force:
            if queue is not None and depth > 0:
                raise QueueExists("Queue already exists (links to %s)\n"
                                  "Use --force to replace" % queue)
            elif depth > 0:
                raise QueueExists("Broken link exists\n"
                                  "Use --force to replace")
            else:
                raise QueueExists("Queue already exists\n"
                                  "Use --force to replace")
        if queue is None:
            logger.info("Replacing broken link")
        elif depth > 0:
            logger.info("Replacing link to %s...", queue)
        else:
            logger.info("Replacing existing queue...")
        self.check_call('rm -Rf %s' % escape_queue(self.queue))

    queue = self._setup()

    for link in links:
        write_link(queue, link)
Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. After installation, creates links to this installation at the specified locations.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L366-L411
[ "def escape_queue(s):\n \"\"\"Escapes the path to a queue, e.g. preserves ~ at the begining.\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if s.startswith('~/'):\n return '~/' + shell_escape(s[2:])\n else:\n return shell_escape(s)\n", "def check_call(self, cmd):\n \"\"\"Calls a command through SSH.\n \"\"\"\n ret, _ = self._call(cmd, False)\n if ret != 0: # pragma: no cover\n raise RemoteCommandFailure(command=cmd, ret=ret)\n", "def _resolve_queue(self, queue, depth=0, links=None):\n \"\"\"Finds the location of tej's queue directory on the server.\n\n The `queue` set when constructing this `RemoteQueue` might be relative\n to the home directory and might contain ``~user`` placeholders. Also,\n each queue may in fact be a link to another path (a file containing\n the string ``tejdir:``, a space, and a new pathname, relative to this\n link's location).\n \"\"\"\n if depth == 0:\n logger.debug(\"resolve_queue(%s)\", queue)\n answer = self.check_output(\n 'if [ -d %(queue)s ]; then '\n ' cd %(queue)s; echo \"dir\"; cat version; pwd; '\n 'elif [ -f %(queue)s ]; then '\n ' cat %(queue)s; '\n 'else '\n ' echo no; '\n 'fi' % {\n 'queue': escape_queue(queue)})\n if answer == b'no':\n if depth > 0:\n logger.debug(\"Broken link at depth=%d\", depth)\n else:\n logger.debug(\"Path doesn't exist\")\n return None, depth\n elif answer.startswith(b'dir\\n'):\n version, runtime, path = answer[4:].split(b'\\n', 2)\n try:\n version = tuple(int(e)\n for e in version.decode('ascii', 'ignore')\n .split('.'))\n except ValueError:\n version = 0, 0\n if version[:2] != self.PROTOCOL_VERSION:\n raise QueueExists(\n msg=\"Queue exists and is using incompatible protocol \"\n \"version %s\" % '.'.join('%s' % e for e in version))\n path = PosixPath(path)\n runtime = runtime.decode('ascii', 'replace')\n if self.need_runtime is not None:\n if (self.need_runtime is not None and\n runtime not in self.need_runtime):\n raise 
QueueExists(\n msg=\"Queue exists and is using explicitely disallowed \"\n \"runtime %s\" % runtime)\n logger.debug(\"Found directory at %s, depth=%d, runtime=%s\",\n path, depth, runtime)\n return path, depth\n elif answer.startswith(b'tejdir: '):\n new = queue.parent / answer[8:]\n logger.debug(\"Found link to %s, recursing\", new)\n if links is not None:\n links.append(queue)\n return self._resolve_queue(new, depth + 1)\n else: # pragma: no cover\n logger.debug(\"Server returned %r\", answer)\n raise RemoteCommandFailure(msg=\"Queue resolution command failed \"\n \"in unexpected way\")\n", "def _setup(self):\n \"\"\"Actually installs the runtime.\n \"\"\"\n # Expands ~user in queue\n if self.queue.path[0:1] == b'/':\n queue = self.queue\n else:\n if self.queue.path[0:1] == b'~':\n output = self.check_output('echo %s' %\n escape_queue(self.queue))\n queue = PosixPath(output.rstrip(b'\\r\\n'))\n else:\n output = self.check_output('pwd')\n queue = PosixPath(output.rstrip(b'\\r\\n')) / self.queue\n logger.debug(\"Resolved to %s\", queue)\n\n # Select runtime\n if not self.setup_runtime:\n # Autoselect\n if self._call('which qsub', False)[0] == 0:\n logger.debug(\"qsub is available, using runtime 'pbs'\")\n runtime = 'pbs'\n else:\n logger.debug(\"qsub not found, using runtime 'default'\")\n runtime = 'default'\n else:\n runtime = self.setup_runtime\n\n if self.need_runtime is not None and runtime not in self.need_runtime:\n raise ValueError(\"About to setup runtime %s but that wouldn't \"\n \"match explicitely allowed runtimes\" % runtime)\n\n logger.info(\"Installing runtime %s%s at %s\",\n runtime,\n \"\" if self.setup_runtime else \" (auto)\",\n self.queue)\n\n # Uploads runtime\n scp_client = self.get_scp_client()\n filename = pkg_resources.resource_filename('tej',\n 'remotes/%s' % runtime)\n scp_client.put(filename, str(queue), recursive=True)\n logger.debug(\"Files uploaded\")\n\n # Runs post-setup script\n self.check_call('/bin/sh %s' % shell_escape(queue / 
'commands/setup'))\n logger.debug(\"Post-setup script done\")\n\n self._queue = queue\n return queue\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue._setup
python
def _setup(self):
        """Actually installs the tej runtime at ``self.queue`` on the server.

        Resolves the queue path to an absolute remote location, selects a
        runtime (auto-detecting PBS via ``qsub`` unless ``setup_runtime``
        was set at construction), uploads the runtime files over SCP, and
        runs the runtime's post-setup script.

        :return: The resolved remote queue path (also cached in
            ``self._queue``).
        :raises ValueError: If the selected runtime is not in
            ``self.need_runtime``.
        """
        # Expands ~user in queue
        if self.queue.path[0:1] == b'/':
            # Already absolute; use as-is
            queue = self.queue
        else:
            if self.queue.path[0:1] == b'~':
                # Let the remote shell expand ~ or ~user
                output = self.check_output('echo %s' %
                                           escape_queue(self.queue))
                queue = PosixPath(output.rstrip(b'\r\n'))
            else:
                # Relative path: anchor it at the remote working directory
                output = self.check_output('pwd')
                queue = PosixPath(output.rstrip(b'\r\n')) / self.queue
        logger.debug("Resolved to %s", queue)

        # Select runtime
        if not self.setup_runtime:
            # Autoselect: presence of qsub on PATH implies a PBS-style
            # batch scheduler
            if self._call('which qsub', False)[0] == 0:
                logger.debug("qsub is available, using runtime 'pbs'")
                runtime = 'pbs'
            else:
                logger.debug("qsub not found, using runtime 'default'")
                runtime = 'default'
        else:
            runtime = self.setup_runtime

        # Refuse to install a runtime the caller explicitly disallowed
        if self.need_runtime is not None and runtime not in self.need_runtime:
            raise ValueError("About to setup runtime %s but that wouldn't "
                             "match explicitely allowed runtimes" % runtime)

        logger.info("Installing runtime %s%s at %s",
                    runtime,
                    "" if self.setup_runtime else " (auto)",
                    self.queue)

        # Uploads runtime: the packaged remotes/<runtime> directory is
        # copied recursively to the queue location
        scp_client = self.get_scp_client()
        filename = pkg_resources.resource_filename('tej',
                                                   'remotes/%s' % runtime)
        scp_client.put(filename, str(queue), recursive=True)
        logger.debug("Files uploaded")

        # Runs post-setup script shipped with the runtime (e.g. chmod,
        # directory creation) on the server
        self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup'))
        logger.debug("Post-setup script done")

        # Cache the resolved location so _get_queue() need not re-resolve
        self._queue = queue
        return queue
Actually installs the runtime.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L413-L462
[ "def shell_escape(s):\n r\"\"\"Given bl\"a, returns \"bl\\\\\"a\".\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if not s or any(c not in safe_shell_chars for c in s):\n return '\"%s\"' % (s.replace('\\\\', '\\\\\\\\')\n .replace('\"', '\\\\\"')\n .replace('`', '\\\\`')\n .replace('$', '\\\\$'))\n else:\n return s\n", "def escape_queue(s):\n \"\"\"Escapes the path to a queue, e.g. preserves ~ at the begining.\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if s.startswith('~/'):\n return '~/' + shell_escape(s[2:])\n else:\n return shell_escape(s)\n", "def get_scp_client(self):\n return scp.SCPClient(self.get_client().get_transport())\n", "def _call(self, cmd, get_output):\n \"\"\"Calls a command through the SSH connection.\n\n Remote stderr gets printed to this program's stderr. Output is captured\n and may be returned.\n \"\"\"\n server_err = self.server_logger()\n\n chan = self.get_client().get_transport().open_session()\n try:\n logger.debug(\"Invoking %r%s\",\n cmd, \" (stdout)\" if get_output else \"\")\n chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))\n output = b''\n while True:\n r, w, e = select.select([chan], [], [])\n if chan not in r:\n continue # pragma: no cover\n recvd = False\n while chan.recv_stderr_ready():\n data = chan.recv_stderr(1024)\n server_err.append(data)\n recvd = True\n while chan.recv_ready():\n data = chan.recv(1024)\n if get_output:\n output += data\n recvd = True\n if not recvd and chan.exit_status_ready():\n break\n output = output.rstrip(b'\\r\\n')\n return chan.recv_exit_status(), output\n finally:\n server_err.done()\n chan.close()\n", "def check_call(self, cmd):\n \"\"\"Calls a command through SSH.\n \"\"\"\n ret, _ = self._call(cmd, False)\n if ret != 0: # pragma: no cover\n raise RemoteCommandFailure(command=cmd, ret=ret)\n", "def check_output(self, cmd):\n \"\"\"Calls a command through 
SSH and returns its output.\n \"\"\"\n ret, output = self._call(cmd, True)\n if ret != 0: # pragma: no cover\n raise RemoteCommandFailure(command=cmd, ret=ret)\n logger.debug(\"Output: %r\", output)\n return output\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.submit
python
def submit(self, job_id, directory, script=None):
        """Submits a job to the queue.

        If the runtime is not installed on the server yet, installs it
        first. Uploads `directory` to a server-side job directory, then
        invokes the runtime's submit command.

        :param job_id: Job identifier, or None to generate a unique one
            from the directory name, username and a random suffix.
        :param directory: Local directory to upload as the job's files.
        :param script: Script to run on the server; defaults to
            ``start.sh``.
        :return: The (possibly generated) job identifier.
        :raises JobAlreadyExists: If a job with this id already exists.
        :raises JobNotFound: If the server couldn't create the job
            directory.
        """
        if job_id is None:
            # Generate an identifier: <dirname>_<username>_<random>
            job_id = '%s_%s_%s' % (Path(directory).unicodename,
                                   self.destination['username'],
                                   make_unique_name())
        else:
            # Validate caller-supplied id against the allowed character set
            check_jobid(job_id)

        queue = self._get_queue()
        if queue is None:
            # Queue doesn't exist yet: install the runtime on demand
            queue = self._setup()

        if script is None:
            script = 'start.sh'

        # Create directory
        # new_job returns the server-side path for this job's files;
        # exit status 4 means the id is already taken
        ret, target = self._call('%s %s' % (
                                 shell_escape(queue / 'commands/new_job'),
                                 job_id),
                                 True)
        if ret == 4:
            raise JobAlreadyExists
        elif ret != 0:
            raise JobNotFound("Couldn't create job")
        target = PosixPath(target)
        logger.debug("Server created directory %s", target)

        # Upload to directory
        try:
            scp_client = self.get_scp_client()
            scp_client.put(str(Path(directory)),
                           str(target),
                           recursive=True)
        except BaseException as e:
            # Upload failed: best-effort cleanup of the half-created job,
            # then re-raise the original upload error (not the cleanup one)
            try:
                self.delete(job_id)
            except BaseException:
                raise e
            raise
        logger.debug("Files uploaded")

        # Submit job
        self.check_call('%s %s %s %s' % (
                        shell_escape(queue / 'commands/submit'),
                        job_id,
                        shell_escape(target),
                        shell_escape(script)))
        logger.info("Submitted job %s", job_id)

        return job_id
Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L464-L516
[ "def make_unique_name():\n \"\"\"Makes a unique (random) string.\n \"\"\"\n return next(_unique_names)\n", "def shell_escape(s):\n r\"\"\"Given bl\"a, returns \"bl\\\\\"a\".\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if not s or any(c not in safe_shell_chars for c in s):\n return '\"%s\"' % (s.replace('\\\\', '\\\\\\\\')\n .replace('\"', '\\\\\"')\n .replace('`', '\\\\`')\n .replace('$', '\\\\$'))\n else:\n return s\n", "def check_jobid(job_id):\n if not all(c in JOB_ID_CHARS for c in job_id):\n raise ValueError(\"Invalid job identifier\")\n", "def get_scp_client(self):\n return scp.SCPClient(self.get_client().get_transport())\n", "def _call(self, cmd, get_output):\n \"\"\"Calls a command through the SSH connection.\n\n Remote stderr gets printed to this program's stderr. Output is captured\n and may be returned.\n \"\"\"\n server_err = self.server_logger()\n\n chan = self.get_client().get_transport().open_session()\n try:\n logger.debug(\"Invoking %r%s\",\n cmd, \" (stdout)\" if get_output else \"\")\n chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))\n output = b''\n while True:\n r, w, e = select.select([chan], [], [])\n if chan not in r:\n continue # pragma: no cover\n recvd = False\n while chan.recv_stderr_ready():\n data = chan.recv_stderr(1024)\n server_err.append(data)\n recvd = True\n while chan.recv_ready():\n data = chan.recv(1024)\n if get_output:\n output += data\n recvd = True\n if not recvd and chan.exit_status_ready():\n break\n output = output.rstrip(b'\\r\\n')\n return chan.recv_exit_status(), output\n finally:\n server_err.done()\n chan.close()\n", "def check_call(self, cmd):\n \"\"\"Calls a command through SSH.\n \"\"\"\n ret, _ = self._call(cmd, False)\n if ret != 0: # pragma: no cover\n raise RemoteCommandFailure(command=cmd, ret=ret)\n", "def _get_queue(self):\n \"\"\"Gets the actual location of the queue, or None.\n \"\"\"\n if self._queue is None:\n self._links = 
[]\n queue, depth = self._resolve_queue(self.queue, links=self._links)\n if queue is None and depth > 0:\n raise QueueLinkBroken\n self._queue = queue\n return self._queue\n", "def _setup(self):\n \"\"\"Actually installs the runtime.\n \"\"\"\n # Expands ~user in queue\n if self.queue.path[0:1] == b'/':\n queue = self.queue\n else:\n if self.queue.path[0:1] == b'~':\n output = self.check_output('echo %s' %\n escape_queue(self.queue))\n queue = PosixPath(output.rstrip(b'\\r\\n'))\n else:\n output = self.check_output('pwd')\n queue = PosixPath(output.rstrip(b'\\r\\n')) / self.queue\n logger.debug(\"Resolved to %s\", queue)\n\n # Select runtime\n if not self.setup_runtime:\n # Autoselect\n if self._call('which qsub', False)[0] == 0:\n logger.debug(\"qsub is available, using runtime 'pbs'\")\n runtime = 'pbs'\n else:\n logger.debug(\"qsub not found, using runtime 'default'\")\n runtime = 'default'\n else:\n runtime = self.setup_runtime\n\n if self.need_runtime is not None and runtime not in self.need_runtime:\n raise ValueError(\"About to setup runtime %s but that wouldn't \"\n \"match explicitely allowed runtimes\" % runtime)\n\n logger.info(\"Installing runtime %s%s at %s\",\n runtime,\n \"\" if self.setup_runtime else \" (auto)\",\n self.queue)\n\n # Uploads runtime\n scp_client = self.get_scp_client()\n filename = pkg_resources.resource_filename('tej',\n 'remotes/%s' % runtime)\n scp_client.put(filename, str(queue), recursive=True)\n logger.debug(\"Files uploaded\")\n\n # Runs post-setup script\n self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup'))\n logger.debug(\"Post-setup script done\")\n\n self._queue = queue\n return queue\n", "def delete(self, job_id):\n \"\"\"Deletes a job from the server.\n \"\"\"\n check_jobid(job_id)\n\n queue = self._get_queue()\n if queue is None:\n raise QueueDoesntExist\n\n ret, output = self._call('%s %s' % (\n shell_escape(queue / 'commands/delete'),\n job_id),\n False)\n if ret == 3:\n raise JobNotFound\n elif 
ret == 2:\n raise JobStillRunning\n elif ret != 0:\n raise RemoteCommandFailure(command='commands/delete',\n ret=ret)\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def status(self, job_id): """Gets the status of a previously-submitted job. 
""" check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. """ check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. 
""" check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. """ queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.status
python
def status(self, job_id): check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret)
Gets the status of a previously-submitted job.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L518-L542
[ "def shell_escape(s):\n r\"\"\"Given bl\"a, returns \"bl\\\\\"a\".\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if not s or any(c not in safe_shell_chars for c in s):\n return '\"%s\"' % (s.replace('\\\\', '\\\\\\\\')\n .replace('\"', '\\\\\"')\n .replace('`', '\\\\`')\n .replace('$', '\\\\$'))\n else:\n return s\n", "def check_jobid(job_id):\n if not all(c in JOB_ID_CHARS for c in job_id):\n raise ValueError(\"Invalid job identifier\")\n", "def _call(self, cmd, get_output):\n \"\"\"Calls a command through the SSH connection.\n\n Remote stderr gets printed to this program's stderr. Output is captured\n and may be returned.\n \"\"\"\n server_err = self.server_logger()\n\n chan = self.get_client().get_transport().open_session()\n try:\n logger.debug(\"Invoking %r%s\",\n cmd, \" (stdout)\" if get_output else \"\")\n chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))\n output = b''\n while True:\n r, w, e = select.select([chan], [], [])\n if chan not in r:\n continue # pragma: no cover\n recvd = False\n while chan.recv_stderr_ready():\n data = chan.recv_stderr(1024)\n server_err.append(data)\n recvd = True\n while chan.recv_ready():\n data = chan.recv(1024)\n if get_output:\n output += data\n recvd = True\n if not recvd and chan.exit_status_ready():\n break\n output = output.rstrip(b'\\r\\n')\n return chan.recv_exit_status(), output\n finally:\n server_err.done()\n chan.close()\n", "def _get_queue(self):\n \"\"\"Gets the actual location of the queue, or None.\n \"\"\"\n if self._queue is None:\n self._links = []\n queue, depth = self._resolve_queue(self.queue, links=self._links)\n if queue is None and depth > 0:\n raise QueueLinkBroken\n self._queue = queue\n return self._queue\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.download
python
def download(self, job_id, files, **kwargs): check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive)
Downloads files from server.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L544-L582
[ "def check_jobid(job_id):\n if not all(c in JOB_ID_CHARS for c in job_id):\n raise ValueError(\"Invalid job identifier\")\n", "def get_scp_client(self):\n return scp.SCPClient(self.get_client().get_transport())\n", "def status(self, job_id):\n \"\"\"Gets the status of a previously-submitted job.\n \"\"\"\n check_jobid(job_id)\n\n queue = self._get_queue()\n if queue is None:\n raise QueueDoesntExist\n\n ret, output = self._call('%s %s' % (\n shell_escape(queue / 'commands/status'),\n job_id),\n True)\n if ret == 0:\n directory, result = output.splitlines()\n result = result.decode('utf-8')\n return RemoteQueue.JOB_DONE, PosixPath(directory), result\n elif ret == 2:\n directory = output.splitlines()[0]\n return RemoteQueue.JOB_RUNNING, PosixPath(directory), None\n elif ret == 3:\n raise JobNotFound\n else:\n raise RemoteCommandFailure(command=\"commands/status\",\n ret=ret)\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. 
""" check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. """ queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.kill
python
def kill(self, job_id): check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret)
Kills a job on the server.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L584-L601
[ "def shell_escape(s):\n r\"\"\"Given bl\"a, returns \"bl\\\\\"a\".\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if not s or any(c not in safe_shell_chars for c in s):\n return '\"%s\"' % (s.replace('\\\\', '\\\\\\\\')\n .replace('\"', '\\\\\"')\n .replace('`', '\\\\`')\n .replace('$', '\\\\$'))\n else:\n return s\n", "def check_jobid(job_id):\n if not all(c in JOB_ID_CHARS for c in job_id):\n raise ValueError(\"Invalid job identifier\")\n", "def _call(self, cmd, get_output):\n \"\"\"Calls a command through the SSH connection.\n\n Remote stderr gets printed to this program's stderr. Output is captured\n and may be returned.\n \"\"\"\n server_err = self.server_logger()\n\n chan = self.get_client().get_transport().open_session()\n try:\n logger.debug(\"Invoking %r%s\",\n cmd, \" (stdout)\" if get_output else \"\")\n chan.exec_command('/bin/sh -c %s' % shell_escape(cmd))\n output = b''\n while True:\n r, w, e = select.select([chan], [], [])\n if chan not in r:\n continue # pragma: no cover\n recvd = False\n while chan.recv_stderr_ready():\n data = chan.recv_stderr(1024)\n server_err.append(data)\n recvd = True\n while chan.recv_ready():\n data = chan.recv(1024)\n if get_output:\n output += data\n recvd = True\n if not recvd and chan.exit_status_ready():\n break\n output = output.rstrip(b'\\r\\n')\n return chan.recv_exit_status(), output\n finally:\n server_err.done()\n chan.close()\n", "def _get_queue(self):\n \"\"\"Gets the actual location of the queue, or None.\n \"\"\"\n if self._queue is None:\n self._links = []\n queue, depth = self._resolve_queue(self.queue, links=self._links)\n if queue is None and depth > 0:\n raise QueueLinkBroken\n self._queue = queue\n return self._queue\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def delete(self, job_id): """Deletes a job from the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def list(self): """Lists the jobs on the server. 
""" queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/submission.py
RemoteQueue.list
python
def list(self): queue = self._get_queue() if queue is None: raise QueueDoesntExist output = self.check_output('%s' % shell_escape(queue / 'commands/list')) job_id, info = None, None for line in output.splitlines(): line = line.decode('utf-8') if line.startswith(' '): key, value = line[4:].split(': ', 1) info[key] = value else: if job_id is not None: yield job_id, info job_id = line info = {} if job_id is not None: yield job_id, info
Lists the jobs on the server.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/submission.py#L624-L646
[ "def shell_escape(s):\n r\"\"\"Given bl\"a, returns \"bl\\\\\"a\".\n \"\"\"\n if isinstance(s, PosixPath):\n s = unicode_(s)\n elif isinstance(s, bytes):\n s = s.decode('utf-8')\n if not s or any(c not in safe_shell_chars for c in s):\n return '\"%s\"' % (s.replace('\\\\', '\\\\\\\\')\n .replace('\"', '\\\\\"')\n .replace('`', '\\\\`')\n .replace('$', '\\\\$'))\n else:\n return s\n", "def check_output(self, cmd):\n \"\"\"Calls a command through SSH and returns its output.\n \"\"\"\n ret, output = self._call(cmd, True)\n if ret != 0: # pragma: no cover\n raise RemoteCommandFailure(command=cmd, ret=ret)\n logger.debug(\"Output: %r\", output)\n return output\n", "def _get_queue(self):\n \"\"\"Gets the actual location of the queue, or None.\n \"\"\"\n if self._queue is None:\n self._links = []\n queue, depth = self._resolve_queue(self.queue, links=self._links)\n if queue is None and depth > 0:\n raise QueueLinkBroken\n self._queue = queue\n return self._queue\n" ]
class RemoteQueue(object): JOB_DONE = 'finished' JOB_RUNNING = 'running' JOB_INCOMPLETE = 'incomplete' JOB_CREATED = 'created' PROTOCOL_VERSION = 0, 2 def __init__(self, destination, queue, setup_runtime=None, need_runtime=None): """Creates a queue object, that represents a job queue on a server. :param destination: The address of the server, used to SSH into it. :param queue: The pathname of the queue on the remote server. Something like "~/.tej" is usually adequate. This will contain both the job info and files, and the scripts used to manage it on the server side. :param setup_runtime: The name of the runtime to deploy on the server if the queue doesn't already exist. If None (default), it will auto-detect what is appropriate (currently, `pbs` if the ``qsub`` command is available), and fallback on `default`. If `need_runtime` is set, this should be one of the accepted values. :param need_runtime: A list of runtime names that are acceptable. If the queue already exists on the server and this argument is not None, the installed runtime will be matched against it, and a failure will be reported if it is not one of the provided values. """ if isinstance(destination, string_types): self.destination = parse_ssh_destination(destination) else: if 'hostname' not in destination: raise InvalidDestination("destination dictionary is missing " "hostname") self.destination = destination if setup_runtime not in (None, 'default', 'pbs'): raise ValueError("Selected runtime %r is unknown" % setup_runtime) self.setup_runtime = setup_runtime if need_runtime is not None: self.need_runtime = set(need_runtime) else: self.need_runtime = None self.queue = PosixPath(queue) self._queue = None self._ssh = None self._connect() def server_logger(self): """Handles messages from the server. By default, uses getLogger('tej.server').warning(). Override this in subclasses to provide your own mechanism. 
""" return ServerLogger() @property def destination_string(self): return destination_as_string(self.destination) def _ssh_client(self): """Gets an SSH client to connect with. """ ssh = paramiko.SSHClient() ssh.load_system_host_keys() ssh.set_missing_host_key_policy(paramiko.RejectPolicy()) return ssh def _connect(self): """Connects via SSH. """ ssh = self._ssh_client() logger.debug("Connecting with %s", ', '.join('%s=%r' % (k, v if k != "password" else "***") for k, v in iteritems(self.destination))) ssh.connect(**self.destination) logger.debug("Connected to %s", self.destination['hostname']) self._ssh = ssh def get_client(self): """Gets the SSH client. This will check that the connection is still alive first, and reconnect if necessary. """ if self._ssh is None: self._connect() return self._ssh else: try: chan = self._ssh.get_transport().open_session() except (socket.error, paramiko.SSHException): logger.warning("Lost connection, reconnecting...") self._ssh.close() self._connect() else: chan.close() return self._ssh def get_scp_client(self): return scp.SCPClient(self.get_client().get_transport()) def _call(self, cmd, get_output): """Calls a command through the SSH connection. Remote stderr gets printed to this program's stderr. Output is captured and may be returned. 
""" server_err = self.server_logger() chan = self.get_client().get_transport().open_session() try: logger.debug("Invoking %r%s", cmd, " (stdout)" if get_output else "") chan.exec_command('/bin/sh -c %s' % shell_escape(cmd)) output = b'' while True: r, w, e = select.select([chan], [], []) if chan not in r: continue # pragma: no cover recvd = False while chan.recv_stderr_ready(): data = chan.recv_stderr(1024) server_err.append(data) recvd = True while chan.recv_ready(): data = chan.recv(1024) if get_output: output += data recvd = True if not recvd and chan.exit_status_ready(): break output = output.rstrip(b'\r\n') return chan.recv_exit_status(), output finally: server_err.done() chan.close() def check_call(self, cmd): """Calls a command through SSH. """ ret, _ = self._call(cmd, False) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) def check_output(self, cmd): """Calls a command through SSH and returns its output. """ ret, output = self._call(cmd, True) if ret != 0: # pragma: no cover raise RemoteCommandFailure(command=cmd, ret=ret) logger.debug("Output: %r", output) return output def _resolve_queue(self, queue, depth=0, links=None): """Finds the location of tej's queue directory on the server. The `queue` set when constructing this `RemoteQueue` might be relative to the home directory and might contain ``~user`` placeholders. Also, each queue may in fact be a link to another path (a file containing the string ``tejdir:``, a space, and a new pathname, relative to this link's location). 
""" if depth == 0: logger.debug("resolve_queue(%s)", queue) answer = self.check_output( 'if [ -d %(queue)s ]; then ' ' cd %(queue)s; echo "dir"; cat version; pwd; ' 'elif [ -f %(queue)s ]; then ' ' cat %(queue)s; ' 'else ' ' echo no; ' 'fi' % { 'queue': escape_queue(queue)}) if answer == b'no': if depth > 0: logger.debug("Broken link at depth=%d", depth) else: logger.debug("Path doesn't exist") return None, depth elif answer.startswith(b'dir\n'): version, runtime, path = answer[4:].split(b'\n', 2) try: version = tuple(int(e) for e in version.decode('ascii', 'ignore') .split('.')) except ValueError: version = 0, 0 if version[:2] != self.PROTOCOL_VERSION: raise QueueExists( msg="Queue exists and is using incompatible protocol " "version %s" % '.'.join('%s' % e for e in version)) path = PosixPath(path) runtime = runtime.decode('ascii', 'replace') if self.need_runtime is not None: if (self.need_runtime is not None and runtime not in self.need_runtime): raise QueueExists( msg="Queue exists and is using explicitely disallowed " "runtime %s" % runtime) logger.debug("Found directory at %s, depth=%d, runtime=%s", path, depth, runtime) return path, depth elif answer.startswith(b'tejdir: '): new = queue.parent / answer[8:] logger.debug("Found link to %s, recursing", new) if links is not None: links.append(queue) return self._resolve_queue(new, depth + 1) else: # pragma: no cover logger.debug("Server returned %r", answer) raise RemoteCommandFailure(msg="Queue resolution command failed " "in unexpected way") def _get_queue(self): """Gets the actual location of the queue, or None. """ if self._queue is None: self._links = [] queue, depth = self._resolve_queue(self.queue, links=self._links) if queue is None and depth > 0: raise QueueLinkBroken self._queue = queue return self._queue def setup(self, links=None, force=False, only_links=False): """Installs the runtime at the target location. This will not replace an existing installation, unless `force` is True. 
After installation, creates links to this installation at the specified locations. """ if not links: links = [] if only_links: logger.info("Only creating links") for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(self.queue), 'link': escape_queue(link)}) return queue, depth = self._resolve_queue(self.queue) if queue is not None or depth > 0: if force: if queue is None: logger.info("Replacing broken link") elif depth > 0: logger.info("Replacing link to %s...", queue) else: logger.info("Replacing existing queue...") self.check_call('rm -Rf %s' % escape_queue(self.queue)) else: if queue is not None and depth > 0: raise QueueExists("Queue already exists (links to %s)\n" "Use --force to replace" % queue) elif depth > 0: raise QueueExists("Broken link exists\n" "Use --force to replace") else: raise QueueExists("Queue already exists\n" "Use --force to replace") queue = self._setup() for link in links: self.check_call('echo "tejdir:" %(queue)s > %(link)s' % { 'queue': escape_queue(queue), 'link': escape_queue(link)}) def _setup(self): """Actually installs the runtime. 
""" # Expands ~user in queue if self.queue.path[0:1] == b'/': queue = self.queue else: if self.queue.path[0:1] == b'~': output = self.check_output('echo %s' % escape_queue(self.queue)) queue = PosixPath(output.rstrip(b'\r\n')) else: output = self.check_output('pwd') queue = PosixPath(output.rstrip(b'\r\n')) / self.queue logger.debug("Resolved to %s", queue) # Select runtime if not self.setup_runtime: # Autoselect if self._call('which qsub', False)[0] == 0: logger.debug("qsub is available, using runtime 'pbs'") runtime = 'pbs' else: logger.debug("qsub not found, using runtime 'default'") runtime = 'default' else: runtime = self.setup_runtime if self.need_runtime is not None and runtime not in self.need_runtime: raise ValueError("About to setup runtime %s but that wouldn't " "match explicitely allowed runtimes" % runtime) logger.info("Installing runtime %s%s at %s", runtime, "" if self.setup_runtime else " (auto)", self.queue) # Uploads runtime scp_client = self.get_scp_client() filename = pkg_resources.resource_filename('tej', 'remotes/%s' % runtime) scp_client.put(filename, str(queue), recursive=True) logger.debug("Files uploaded") # Runs post-setup script self.check_call('/bin/sh %s' % shell_escape(queue / 'commands/setup')) logger.debug("Post-setup script done") self._queue = queue return queue def submit(self, job_id, directory, script=None): """Submits a job to the queue. If the runtime is not there, it will be installed. If it is a broken chain of links, error. 
""" if job_id is None: job_id = '%s_%s_%s' % (Path(directory).unicodename, self.destination['username'], make_unique_name()) else: check_jobid(job_id) queue = self._get_queue() if queue is None: queue = self._setup() if script is None: script = 'start.sh' # Create directory ret, target = self._call('%s %s' % ( shell_escape(queue / 'commands/new_job'), job_id), True) if ret == 4: raise JobAlreadyExists elif ret != 0: raise JobNotFound("Couldn't create job") target = PosixPath(target) logger.debug("Server created directory %s", target) # Upload to directory try: scp_client = self.get_scp_client() scp_client.put(str(Path(directory)), str(target), recursive=True) except BaseException as e: try: self.delete(job_id) except BaseException: raise e raise logger.debug("Files uploaded") # Submit job self.check_call('%s %s %s %s' % ( shell_escape(queue / 'commands/submit'), job_id, shell_escape(target), shell_escape(script))) logger.info("Submitted job %s", job_id) return job_id def status(self, job_id): """Gets the status of a previously-submitted job. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/status'), job_id), True) if ret == 0: directory, result = output.splitlines() result = result.decode('utf-8') return RemoteQueue.JOB_DONE, PosixPath(directory), result elif ret == 2: directory = output.splitlines()[0] return RemoteQueue.JOB_RUNNING, PosixPath(directory), None elif ret == 3: raise JobNotFound else: raise RemoteCommandFailure(command="commands/status", ret=ret) def download(self, job_id, files, **kwargs): """Downloads files from server. 
""" check_jobid(job_id) if not files: return if isinstance(files, string_types): files = [files] directory = False recursive = kwargs.pop('recursive', True) if 'destination' in kwargs and 'directory' in kwargs: raise TypeError("Only use one of 'destination' or 'directory'") elif 'destination' in kwargs: destination = Path(kwargs.pop('destination')) if len(files) != 1: raise ValueError("'destination' specified but multiple files " "given; did you mean to use 'directory'?") elif 'directory' in kwargs: destination = Path(kwargs.pop('directory')) directory = True if kwargs: raise TypeError("Got unexpected keyword arguments") # Might raise JobNotFound status, target, result = self.status(job_id) scp_client = self.get_scp_client() for filename in files: logger.info("Downloading %s", target / filename) if directory: scp_client.get(str(target / filename), str(destination / filename), recursive=recursive) else: scp_client.get(str(target / filename), str(destination), recursive=recursive) def kill(self, job_id): """Kills a job on the server. """ check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/kill'), job_id), False) if ret == 3: raise JobNotFound elif ret != 0: raise RemoteCommandFailure(command='commands/kill', ret=ret) def delete(self, job_id): """Deletes a job from the server. 
""" check_jobid(job_id) queue = self._get_queue() if queue is None: raise QueueDoesntExist ret, output = self._call('%s %s' % ( shell_escape(queue / 'commands/delete'), job_id), False) if ret == 3: raise JobNotFound elif ret == 2: raise JobStillRunning elif ret != 0: raise RemoteCommandFailure(command='commands/delete', ret=ret) def cleanup(self, kill=False): queue = self._get_queue() if queue is not None: # Kill jobs for job_id, info in self.list(): if info['status'] == 'running': if not kill: raise JobStillRunning("Can't cleanup, some jobs are " "still running") else: logger.info("Killing running job %s", job_id) self.kill(job_id) # Remove queue logger.info("Removing queue at %s", queue) self.check_call('rm -rf -- %s' % shell_escape(queue)) # Remove links for link in self._links: self.check_call('rm -rf -- %s' % shell_escape(link)) return True
VisTrails/tej
tej/utils.py
shell_escape
python
def shell_escape(s): r"""Given bl"a, returns "bl\\"a". """ if isinstance(s, PosixPath): s = unicode_(s) elif isinstance(s, bytes): s = s.decode('utf-8') if not s or any(c not in safe_shell_chars for c in s): return '"%s"' % (s.replace('\\', '\\\\') .replace('"', '\\"') .replace('`', '\\`') .replace('$', '\\$')) else: return s
r"""Given bl"a, returns "bl\\"a".
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/utils.py#L39-L52
null
"""Utility functions. """ from __future__ import unicode_literals import sys from rpaths import PosixPath PY3 = sys.version_info[0] == 3 if PY3: unicode_ = str string_types = (str,) izip = zip irange = range iteritems = dict.items itervalues = dict.values listvalues = lambda d: list(d.values()) else: unicode_ = unicode # noqa: F821 string_types = (str, unicode) # noqa: F821 import itertools izip = itertools.izip irange = xrange # noqa: F821 iteritems = dict.iteritems itervalues = dict.itervalues listvalues = dict.values safe_shell_chars = set("ABCDEFGHIJKLMNOPQRSTUVWXYZ" "abcdefghijklmnopqrstuvwxyz" "0123456789" "-+=/:.,%_")
VisTrails/tej
tej/main.py
main
python
def main(): # Locale locale.setlocale(locale.LC_ALL, '') # Encoding for output streams if str == bytes: # PY2 writer = codecs.getwriter(locale.getpreferredencoding()) o_stdout, o_stderr = sys.stdout, sys.stderr sys.stdout = writer(sys.stdout) sys.stdout.buffer = o_stdout sys.stderr = writer(sys.stderr) sys.stderr.buffer = o_stderr else: # PY3 sys.stdin = sys.stdin.buffer # Parses command-line # Runtime to setup def add_runtime_option(opt): opt.add_argument( '-r', '--runtime', action='store', help="runtime to deploy on the server if the queue doesn't exist. " "If unspecified, will auto-detect what is appropriate, and " "fallback on 'default'.") # Destination selection def add_destination_option(opt): opt.add_argument('destination', action='store', help="Machine to SSH into; [user@]host[:port]") opt.add_argument('--queue', action='store', default=DEFAULT_TEJ_DIR, help="Directory for tej's files") # Root parser parser = argparse.ArgumentParser( description="Trivial Extensible Job-submission") parser.add_argument('--version', action='version', version="tej version %s" % tej_version) parser.add_argument('-v', '--verbose', action='count', default=1, dest='verbosity', help="augments verbosity level") subparsers = parser.add_subparsers(title="commands", metavar='') # Setup action parser_setup = subparsers.add_parser( 'setup', help="Sets up tej on a remote machine") add_destination_option(parser_setup) add_runtime_option(parser_setup) parser_setup.add_argument('--make-link', action='append', dest='make_link') parser_setup.add_argument('--make-default-link', action='append_const', dest='make_link', const=DEFAULT_TEJ_DIR) parser_setup.add_argument('--force', action='store_true') parser_setup.add_argument('--only-links', action='store_true') parser_setup.set_defaults(func=_setup) # Submit action parser_submit = subparsers.add_parser( 'submit', help="Submits a job to a remote machine") add_destination_option(parser_submit) add_runtime_option(parser_submit) 
parser_submit.add_argument('--id', action='store', help="Identifier for the new job") parser_submit.add_argument('--script', action='store', help="Relative name of the script in the " "directory") parser_submit.add_argument('directory', action='store', help="Job directory to upload") parser_submit.set_defaults(func=_submit) # Status action parser_status = subparsers.add_parser( 'status', help="Gets the status of a job") add_destination_option(parser_status) parser_status.add_argument('--id', action='store', help="Identifier of the running job") parser_status.set_defaults(func=_status) # Download action parser_download = subparsers.add_parser( 'download', help="Downloads files from finished job") add_destination_option(parser_download) parser_download.add_argument('--id', action='store', help="Identifier of the job") parser_download.add_argument('files', action='store', nargs=argparse.ONE_OR_MORE, help="Files to download") parser_download.set_defaults(func=_download) # Kill action parser_kill = subparsers.add_parser( 'kill', help="Kills a running job") add_destination_option(parser_kill) parser_kill.add_argument('--id', action='store', help="Identifier of the running job") parser_kill.set_defaults(func=_kill) # Delete action parser_delete = subparsers.add_parser( 'delete', help="Deletes a finished job") add_destination_option(parser_delete) parser_delete.add_argument('--id', action='store', help="Identifier of the finished job") parser_delete.set_defaults(func=_delete) # List action parser_list = subparsers.add_parser( 'list', help="Lists remote jobs") add_destination_option(parser_list) parser_list.set_defaults(func=_list) args = parser.parse_args() setup_logging(args.verbosity) try: args.func(args) except Error as e: # No need to show a traceback here, this is not an internal error logger.critical(e) sys.exit(1) sys.exit(0)
Entry point when called on the command-line.
train
https://github.com/VisTrails/tej/blob/b8dedaeb6bdeb650b46cfe6d85e5aa9284fc7f0b/tej/main.py#L103-L231
[ "def setup_logging(verbosity):\n levels = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG]\n level = levels[min(verbosity, 3)]\n\n fmt = \"%(asctime)s %(levelname)s: %(message)s\"\n formatter = logging.Formatter(fmt)\n\n handler = logging.StreamHandler()\n handler.setFormatter(formatter)\n\n logging.getLogger().addHandler(handler)\n logger.setLevel(level)\n\n # Prints output from server to stderr\n server = logging.getLogger('tej.server')\n server.propagate = False\n raw_console = logging.StreamHandler(sys.stderr)\n raw_console.setFormatter(logging.Formatter('%(message)s'))\n server.addHandler(raw_console)\n", "def add_runtime_option(opt):\n opt.add_argument(\n '-r', '--runtime', action='store',\n help=\"runtime to deploy on the server if the queue doesn't exist. \"\n \"If unspecified, will auto-detect what is appropriate, and \"\n \"fallback on 'default'.\")\n", "def add_destination_option(opt):\n opt.add_argument('destination', action='store',\n help=\"Machine to SSH into; [user@]host[:port]\")\n opt.add_argument('--queue', action='store', default=DEFAULT_TEJ_DIR,\n help=\"Directory for tej's files\")\n" ]
"""Entry point for the tej utility. """ from __future__ import unicode_literals import argparse import codecs import functools import locale import logging import sys from tej import __version__ as tej_version from tej.errors import Error, JobNotFound from tej.submission import DEFAULT_TEJ_DIR, RemoteQueue logger = logging.getLogger('tej') def setup_logging(verbosity): levels = [logging.CRITICAL, logging.WARNING, logging.INFO, logging.DEBUG] level = levels[min(verbosity, 3)] fmt = "%(asctime)s %(levelname)s: %(message)s" formatter = logging.Formatter(fmt) handler = logging.StreamHandler() handler.setFormatter(formatter) logging.getLogger().addHandler(handler) logger.setLevel(level) # Prints output from server to stderr server = logging.getLogger('tej.server') server.propagate = False raw_console = logging.StreamHandler(sys.stderr) raw_console.setFormatter(logging.Formatter('%(message)s')) server.addHandler(raw_console) def needs_job_id(f): @functools.wraps(f) def wrapped(args): if args.id is None: logger.critical("Missing job identifier") sys.exit(1) return f(args) return wrapped def _setup(args): queue = RemoteQueue(args.destination, args.queue, setup_runtime=args.runtime) queue.setup(args.make_link, args.force, args.only_links) def _submit(args): queue = RemoteQueue(args.destination, args.queue) job_id = queue.submit(args.id, args.directory, args.script) print(job_id) @needs_job_id def _status(args): try: queue = RemoteQueue(args.destination, args.queue) status, directory, arg = queue.status(args.id) if status == RemoteQueue.JOB_DONE: sys.stdout.write("finished") elif status == RemoteQueue.JOB_RUNNING: sys.stdout.write("running") else: # pragma: no cover raise RuntimeError("Got unknown job status %r" % status) if arg is not None: sys.stdout.write(' %s' % arg) sys.stdout.write('\n') except JobNotFound: print("not found") @needs_job_id def _download(args): RemoteQueue(args.destination, args.queue).download(args.id, args.files, directory='.') @needs_job_id def 
_kill(args): RemoteQueue(args.destination, args.queue).kill(args.id) @needs_job_id def _delete(args): RemoteQueue(args.destination, args.queue).delete(args.id) def _list(args): for job_id, info in RemoteQueue(args.destination, args.queue).list(): sys.stdout.write("%s %s\n" % (job_id, info['status'])) if __name__ == '__main__': # pragma: no cover main()
the01/python-flotils
flotils/loadable.py
load_json
python
def load_json(json_data, decoder=None): if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode)
Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L131-L144
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, 
val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, 
val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None 
""" if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json 
data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to 
{}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing 
file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
load_json_file
python
def load_json_file(file, decoder=None):
    """
    Load JSON data from a readable object or a file path.

    :param file: Readable object or path to file
    :type file: FileIO | str
    :param decoder: Use custom json decoder (default: DateTimeDecoder)
    :type decoder: T <= DateTimeDecoder
    :return: Json data
    :rtype: None | int | float | str | list | dict
    """
    if decoder is None:
        decoder = DateTimeDecoder
    hook = decoder.decode
    # Anything with a .read() is treated as an already-open stream.
    if hasattr(file, "read"):
        return json.load(file, object_hook=hook)
    with io.open(file, "r", encoding="utf-8") as fp:
        return json.load(fp, object_hook=hook)
Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L147-L163
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or 
path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param 
val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None """ if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path 
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load 
settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", 
encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
save_json
python
def save_json(val, pretty=False, sort=True, encoder=None):
    """
    Serialize a value to a JSON string.

    :param val: Value or struct to save
    :type val: None | int | float | str | list | dict
    :param pretty: Format data to be readable (default: False)
        otherwise going to be compact
    :type pretty: bool
    :param sort: Sort keys (default: True)
    :type sort: bool
    :param encoder: Use custom json encoder (default: DateTimeEncoder)
    :type encoder: T <= DateTimeEncoder
    :return: The jsonified string
    :rtype: str | unicode
    """
    cls = DateTimeEncoder if encoder is None else encoder
    if pretty:
        dump_args = {'indent': 4, 'separators': (',', ': ')}
    else:
        dump_args = {'separators': (',', ':')}
    data = json.dumps(val, sort_keys=sort, cls=cls, **dump_args)
    # Python 2 returns a byte str here; normalize to unicode.
    if not sys.version_info > (3, 0) and isinstance(data, str):
        data = data.decode("utf-8")
    return data
Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The jsonified string :rtype: str | unicode
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L166-L201
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool 
:param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file 
:param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None """ if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load 
settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) 
raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected 
settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise 
IOError(e)
the01/python-flotils
flotils/loadable.py
save_json_file
python
def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close()
Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L204-L257
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The 
jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", 
encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None """ if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path 
is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to 
be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or 
error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
load_yaml_file
python
def load_yaml_file(file): if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader)
Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L272-L284
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The 
jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) 
def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended 
path :rtype: str | None """ if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= 
flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, 
val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None 
:raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
save_yaml_file
python
def save_yaml_file(file, val):
    """
    Save data to yaml file

    :param file: Writable object or path to file
    :type file: FileIO | str | unicode
    :param val: Value or struct to save
    :type val: None | int | float | str | unicode | list | dict
    :rtype: None
    """
    # Already a writable object -> dump directly, caller owns the handle
    if hasattr(file, "write"):
        yaml.dump(val, file)
        return
    # Got a path -> open it ourselves and make sure it gets closed
    handle = io.open(file, "w", encoding="utf-8")
    try:
        yaml.dump(val, handle)
    finally:
        handle.close()
Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L299-L318
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The 
jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not 
hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None """ if 
not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data 
:rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to 
{}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing 
file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
load_file
python
def load_file(path):
    """
    Load data from a .json/.yaml/.yml file

    :param path: Path to file
    :type path: str | unicode
    :return: Loaded data (empty dict if extension is unrecognized)
    :rtype: None | int | float | str | unicode | list | dict
    :raises IOError: If no path given, file not found or error accessing file
    """
    res = {}
    if not path:
        # Bug fix: exception was previously constructed but never raised,
        # and the message wrongly said "save" in a load function
        raise IOError("No path specified to load")
    if not os.path.isfile(path):
        raise IOError("File not found {}".format(path))
    try:
        with io.open(path, "r", encoding="utf-8") as f:
            # Dispatch on extension; anything else falls through to {}
            if path.endswith(".json"):
                res = load_json_file(f)
            elif path.endswith(".yaml") or path.endswith(".yml"):
                res = load_yaml_file(f)
    except IOError:
        raise
    except Exception as e:
        # Normalize any parse/read failure to IOError for the caller
        raise IOError(e)
    return res
Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L321-L349
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The 
jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not 
hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None """ if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths 
""" def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": 
raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises 
IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), 
sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
save_file
python
def save_file(path, data, readable=False): if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e)
Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L352-L383
[ "def save_json_file(\n file, val,\n pretty=False, compact=True, sort=True, encoder=None\n):\n \"\"\"\n Save data to json file\n\n :param file: Writable object or path to file\n :type file: FileIO | str | unicode\n :param val: Value or struct to save\n :type val: None | int | float | str | list | dict\n :param pretty: Format data to be readable (default: False)\n :type pretty: bool\n :param compact: Format data to be compact (default: True)\n :type compact: bool\n :param sort: Sort keys (default: True)\n :type sort: bool\n :param encoder: Use custom json encoder\n :type encoder: T <= DateTimeEncoder\n :rtype: None\n \"\"\"\n # TODO: make pretty/compact into one bool?\n if encoder is None:\n encoder = DateTimeEncoder\n opened = False\n\n if not hasattr(file, \"write\"):\n file = io.open(file, \"w\", encoding=\"utf-8\")\n opened = True\n\n try:\n if pretty:\n data = json.dumps(\n val,\n indent=4,\n separators=(',', ': '),\n sort_keys=sort,\n cls=encoder\n )\n elif compact:\n data = json.dumps(\n val,\n separators=(',', ':'),\n sort_keys=sort,\n cls=encoder\n )\n else:\n data = json.dumps(val, sort_keys=sort, cls=encoder)\n if not sys.version_info > (3, 0) and isinstance(data, str):\n data = data.decode(\"utf-8\")\n file.write(data)\n finally:\n if opened:\n file.close()\n" ]
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The 
jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not 
hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def join_path_prefix(path, pre_path=None): """ If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None """ if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for 
instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) 
raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not 
contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) 
except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
join_path_prefix
python
def join_path_prefix(path, pre_path=None): if not path: return path if pre_path and not os.path.isabs(path): return os.path.join(pre_path, path) return path
If path set and not absolute, append it to pre path (if used) :param path: path to append :type path: str | None :param pre_path: Base path to append to (default: None) :type pre_path: None | str :return: Path or appended path :rtype: str | None
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L386-L403
null
# -*- coding: UTF-8 -*- from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals """ Module for loading/saving data/classes with json """ __author__ = "the01" __email__ = "jungflor@gmail.com" __copyright__ = "Copyright (C) 2013-19, Florian JUNG" __license__ = "MIT" __version__ = "0.4.1" __date__ = "2019-04-14" # Created: 2014-08-29 09:38 import os import datetime import json import io import sys import yaml from .logable import Logable, ModuleLogable class Logger(ModuleLogable): pass logger = Logger() # TODO: New format according to https://gist.github.com/majgis/4200488 class DateTimeEncoder(json.JSONEncoder): """ Encode datetime, date and time objects for json """ def default(self, obj): if isinstance(obj, datetime.datetime): # save all without tz info as UTC if obj.tzinfo: obj = (obj - obj.tzinfo.utcoffset(obj)).replace(tzinfo=None) return {'__datetime__': obj.isoformat() + "Z"} elif isinstance(obj, datetime.date): return {'__date__': obj.isoformat()} elif isinstance(obj, datetime.timedelta): # Time delta only stores days, seconds and microseconds return { '__type__': "timedelta", 'days': obj.days, 'seconds': obj.seconds, 'microseconds': obj.microseconds, } elif isinstance(obj, datetime.time): return {'__time__': obj.isoformat()} return super(DateTimeEncoder, self).default(obj) # TODO: Load datetime as utc but without tzinfo set class DateTimeDecoder(object): """ Decode datetime, date and time from json """ @staticmethod def _as_datetime(dct): if "__datetime__" in dct.keys(): # Should be UTC try: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%S.%fZ" ) except ValueError: return datetime.datetime.strptime( dct['__datetime__'], "%Y-%m-%dT%H:%M:%SZ" ) raise TypeError("Not Datetime") @staticmethod def _as_date(dct): if "__date__" in dct: d = datetime.datetime.strptime( dct['__date__'], "%Y-%m-%d" ) if d: return d.date() return d raise TypeError("Not 
Date") @staticmethod def _as_time(dct): if "__time__" in dct: try: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S.%f" ) except ValueError: d = datetime.datetime.strptime( dct['__time__'], "%H:%M:%S" ) if d: return d.time() return d raise TypeError("Not Time") @staticmethod def decode(dct): if not isinstance(dct, dict): return dct if "__type__" in dct: obj_type = dct.pop('__type__') if obj_type == "timedelta": return datetime.timedelta(**dct) # Not matched dct['__type__'] = obj_type try: return DateTimeDecoder._as_datetime(dct) except: try: return DateTimeDecoder._as_date(dct) except: try: return DateTimeDecoder._as_time(dct) except: return dct def load_json(json_data, decoder=None): """ Load data from json string :param json_data: Stringified json object :type json_data: str | unicode :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder return json.loads(json_data, object_hook=decoder.decode) def load_json_file(file, decoder=None): """ Load data from json file :param file: Readable object or path to file :type file: FileIO | str :param decoder: Use custom json decoder :type decoder: T <= DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict """ if decoder is None: decoder = DateTimeDecoder if not hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return json.load(f, object_hook=decoder.decode) return json.load(file, object_hook=decoder.decode) def save_json(val, pretty=False, sort=True, encoder=None): """ Save data to json string :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) otherwise going to be compact :type pretty: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :return: The 
jsonified string :rtype: str | unicode """ if encoder is None: encoder = DateTimeEncoder if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) else: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") return data def save_json_file( file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= DateTimeEncoder :rtype: None """ # TODO: make pretty/compact into one bool? if encoder is None: encoder = DateTimeEncoder opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: if pretty: data = json.dumps( val, indent=4, separators=(',', ': '), sort_keys=sort, cls=encoder ) elif compact: data = json.dumps( val, separators=(',', ':'), sort_keys=sort, cls=encoder ) else: data = json.dumps(val, sort_keys=sort, cls=encoder) if not sys.version_info > (3, 0) and isinstance(data, str): data = data.decode("utf-8") file.write(data) finally: if opened: file.close() def load_yaml(data): """ Load data from yaml string :param data: Stringified yaml object :type data: str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ return yaml.load(data, yaml.FullLoader) def load_yaml_file(file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict """ if not 
hasattr(file, "read"): with io.open(file, "r", encoding="utf-8") as f: return yaml.load(f, yaml.FullLoader) return yaml.load(file, yaml.FullLoader) def save_yaml(val): """ Save data to yaml string :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :return: The yamlified string :rtype: str | unicode """ return yaml.dump(val) def save_yaml_file(file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict """ opened = False if not hasattr(file, "write"): file = io.open(file, "w", encoding="utf-8") opened = True try: yaml.dump(val, file) finally: if opened: file.close() def load_file(path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded data :rtype: None | int | float | str | unicode | list | dict :raises IOError: If file not found or error accessing file """ res = {} if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = load_yaml_file(f) except IOError: raise except Exception as e: raise IOError(e) return res def save_file(path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: Data to save :type data: None | int | float | str | unicode | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or 
path.endswith(".yml"): save_yaml_file(f, data) except IOError: raise except Exception as e: raise IOError(e) class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data 
:rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to 
{}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing 
file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable._load_json_file
python
def _load_json_file(self, file, decoder=None): try: res = load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res
Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L470-L492
null
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: 
Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict 
""" if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable._save_json_file
python
def _save_json_file(
        self, file, val,
        pretty=False, compact=True, sort=True, encoder=None
):
    """
    Save data to json file

    :param file: Writable file or path to file
    :type file: FileIO | str | unicode
    :param val: Value or struct to save
    :type val: None | int | float | str | list | dict
    :param pretty: Format data to be readable (default: False)
    :type pretty: bool
    :param compact: Format data to be compact (default: True)
    :type compact: bool
    :param sort: Sort keys (default: True)
    :type sort: bool
    :param encoder: Use custom json encoder
    :type encoder: T <= flotils.loadable.DateTimeEncoder
    :rtype: None
    :raises IOError: Failed to save
    """
    try:
        save_json_file(file, val, pretty, compact, sort, encoder)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any real save
        # failure is still logged and re-raised as IOError
        self.exception("Failed to save to {}".format(file))
        raise IOError("Saving file failed")
Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L494-L520
[ "def save_json_file(\n file, val,\n pretty=False, compact=True, sort=True, encoder=None\n):\n \"\"\"\n Save data to json file\n\n :param file: Writable object or path to file\n :type file: FileIO | str | unicode\n :param val: Value or struct to save\n :type val: None | int | float | str | list | dict\n :param pretty: Format data to be readable (default: False)\n :type pretty: bool\n :param compact: Format data to be compact (default: True)\n :type compact: bool\n :param sort: Sort keys (default: True)\n :type sort: bool\n :param encoder: Use custom json encoder\n :type encoder: T <= DateTimeEncoder\n :rtype: None\n \"\"\"\n # TODO: make pretty/compact into one bool?\n if encoder is None:\n encoder = DateTimeEncoder\n opened = False\n\n if not hasattr(file, \"write\"):\n file = io.open(file, \"w\", encoding=\"utf-8\")\n opened = True\n\n try:\n if pretty:\n data = json.dumps(\n val,\n indent=4,\n separators=(',', ': '),\n sort_keys=sort,\n cls=encoder\n )\n elif compact:\n data = json.dumps(\n val,\n separators=(',', ':'),\n sort_keys=sort,\n cls=encoder\n )\n else:\n data = json.dumps(val, sort_keys=sort, cls=encoder)\n if not sys.version_info > (3, 0) and isinstance(data, str):\n data = data.decode(\"utf-8\")\n file.write(data)\n finally:\n if opened:\n file.close()\n", "def exception(self, msg, *args, **kwargs):\n self._logger.exception(\n msg,\n extra={'function': self._get_function_name()},\n *args, **kwargs\n )\n" ]
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected 
settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable._load_yaml_file
python
def _load_yaml_file(self, file):
    """
    Load data from yaml file

    :param file: Readable object or path to file
    :type file: FileIO | str | unicode
    :return: Yaml data
    :rtype: None | int | float | str | unicode | list | dict
    :raises IOError: Failed to load
    """
    try:
        res = load_yaml_file(file)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed
        self.exception("Failed to load from {}".format(file))
        raise IOError("Loading file failed")
    return res
Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L522-L537
null
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, 
readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable._save_yaml_file
python
def _save_yaml_file(self, file, val):
    """
    Save data to yaml file

    :param file: Writable object or path to file
    :type file: FileIO | str | unicode
    :param val: Value or struct to save
    :type val: None | int | float | str | unicode | list | dict
    :rtype: None
    :raises IOError: Failed to save
    """
    try:
        save_yaml_file(file, val)
    except Exception:
        # Narrowed from a bare `except:` so SystemExit and
        # KeyboardInterrupt are no longer swallowed; any real save
        # failure is still logged and re-raised as IOError
        self.exception("Failed to save to {}".format(file))
        raise IOError("Saving file failed")
Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L539-L553
null
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def load_settings(self, path): """ Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ 
Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable.load_settings
python
def load_settings(self, path):
    """
    Load settings dict from file

    :param path: Path to settings file
    :type path: str | unicode
    :return: Loaded settings
    :rtype: dict
    :raises IOError: If file not found or error accessing file
    :raises TypeError: Settings file does not contain dict
    """
    loaded = self.load_file(path)
    if isinstance(loaded, dict):
        return loaded
    raise TypeError("Expected settings to be dict")
Load settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L555-L569
[ "def load_file(self, path):\n \"\"\"\n Load file\n\n :param path: Path to file\n :type path: str | unicode\n :return: Loaded settings\n :rtype: None | str | unicode | int | list | dict\n :raises IOError: If file not found or error accessing file\n \"\"\"\n res = None\n\n if not path:\n IOError(\"No path specified to save\")\n\n if not os.path.isfile(path):\n raise IOError(\"File not found {}\".format(path))\n\n try:\n with io.open(path, \"r\", encoding=\"utf-8\") as f:\n if path.endswith(\".json\"):\n res = self._load_json_file(f)\n elif path.endswith(\".yaml\") or path.endswith(\".yml\"):\n res = self._load_yaml_file(f)\n except IOError:\n raise\n except Exception as e:\n self.exception(\"Failed reading {}\".format(path))\n raise IOError(e)\n return res\n" ]
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def save_settings(self, path, settings, 
readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable.save_settings
python
def save_settings(self, path, settings, readable=False): if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable)
Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L571-L587
[ "def save_file(self, path, data, readable=False):\n \"\"\"\n Save to file\n\n :param path: File path to save\n :type path: str | unicode\n :param data: To save\n :type data: None | str | unicode | int | list | dict\n :param readable: Format file to be human readable (default: False)\n :type readable: bool\n :rtype: None\n :raises IOError: If empty path or error writing file\n \"\"\"\n if not path:\n IOError(\"No path specified to save\")\n\n try:\n with io.open(path, \"w\", encoding=\"utf-8\") as f:\n if path.endswith(\".json\"):\n self._save_json_file(\n f,\n data,\n pretty=readable,\n compact=(not readable),\n sort=True\n )\n elif path.endswith(\".yaml\") or path.endswith(\".yml\"):\n self._save_yaml_file(f, data)\n except IOError:\n raise\n except Exception as e:\n self.exception(\"Failed writing {}\".format(path))\n raise IOError(e)\n" ]
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load 
settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable.load_file
python
def load_file(self, path): res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res
Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L589-L618
[ "def exception(self, msg, *args, **kwargs):\n self._logger.exception(\n msg,\n extra={'function': self._get_function_name()},\n *args, **kwargs\n )\n" ]
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load 
settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def save_file(self, path, data, readable=False): """ Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file """ if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
the01/python-flotils
flotils/loadable.py
Loadable.save_file
python
def save_file(self, path, data, readable=False): if not path: IOError("No path specified to save") try: with io.open(path, "w", encoding="utf-8") as f: if path.endswith(".json"): self._save_json_file( f, data, pretty=readable, compact=(not readable), sort=True ) elif path.endswith(".yaml") or path.endswith(".yml"): self._save_yaml_file(f, data) except IOError: raise except Exception as e: self.exception("Failed writing {}".format(path)) raise IOError(e)
Save to file :param path: File path to save :type path: str | unicode :param data: To save :type data: None | str | unicode | int | list | dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/loadable.py#L620-L652
[ "def _save_json_file(\n self, file, val,\n pretty=False, compact=True, sort=True, encoder=None\n):\n \"\"\"\n Save data to json file\n\n :param file: Writable file or path to file\n :type file: FileIO | str | unicode\n :param val: Value or struct to save\n :type val: None | int | float | str | list | dict\n :param pretty: Format data to be readable (default: False)\n :type pretty: bool\n :param compact: Format data to be compact (default: True)\n :type compact: bool\n :param sort: Sort keys (default: True)\n :type sort: bool\n :param encoder: Use custom json encoder\n :type encoder: T <= flotils.loadable.DateTimeEncoder\n :rtype: None\n :raises IOError: Failed to save\n \"\"\"\n try:\n save_json_file(file, val, pretty, compact, sort, encoder)\n except:\n self.exception(\"Failed to save to {}\".format(file))\n raise IOError(\"Saving file failed\")\n", "def exception(self, msg, *args, **kwargs):\n self._logger.exception(\n msg,\n extra={'function': self._get_function_name()},\n *args, **kwargs\n )\n" ]
class Loadable(Logable): """ Class to facilitate loading config from json-files and ease relative paths """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None :raises IOError: Failed to load settings file """ if settings is None: settings = {} super(Loadable, self).__init__(settings) sett_path = settings.get('settings_file', None) self._pre_path = settings.get('path_prefix', None) if sett_path: sett_path = self.join_path_prefix(sett_path) sett = self.load_settings(sett_path) sett_prepath = sett.get('path_prefix') if sett_prepath: # if sett_path is absolute path # -> set to sett_path else join with dict path_prefix self._pre_path = self.join_path_prefix(sett_prepath) # settings in constructor overwrite settings from file sett.update(settings) settings.update(sett) self.debug("Loaded config {}".format(sett_path)) # to apply loaded settings to logable as well super(Loadable, self).__init__(settings) @property def _prePath(self): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) return self._pre_path @_prePath.setter def set_prePath(self, value): import warnings warnings.warn( "This variable is no longer in use - Please use _pre_path instead", DeprecationWarning ) self._pre_path = value def join_path_prefix(self, path): """ If path set and not absolute, append it to self._pre_path :param path: Path to append :type path: str | None :return: Path or appended path :rtype: str | None """ return join_path_prefix(path, self._pre_path) def _load_json_file(self, file, decoder=None): """ Load data from json file :param file: Readable file or path to file :type file: FileIO | str | unicode :param decoder: Use custom json decoder :type decoder: T <= flotils.loadable.DateTimeDecoder :return: Json data :rtype: None | int | float | str | list | dict :raises IOError: Failed to load """ try: res = 
load_json_file(file, decoder=decoder) except ValueError as e: if "{}".format(e) == "No JSON object could be decoded": raise IOError("Decoding JSON failed") self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_json_file( self, file, val, pretty=False, compact=True, sort=True, encoder=None ): """ Save data to json file :param file: Writable file or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | list | dict :param pretty: Format data to be readable (default: False) :type pretty: bool :param compact: Format data to be compact (default: True) :type compact: bool :param sort: Sort keys (default: True) :type sort: bool :param encoder: Use custom json encoder :type encoder: T <= flotils.loadable.DateTimeEncoder :rtype: None :raises IOError: Failed to save """ try: save_json_file(file, val, pretty, compact, sort, encoder) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def _load_yaml_file(self, file): """ Load data from yaml file :param file: Readable object or path to file :type file: FileIO | str | unicode :return: Yaml data :rtype: None | int | float | str | unicode | list | dict :raises IOError: Failed to load """ try: res = load_yaml_file(file) except: self.exception("Failed to load from {}".format(file)) raise IOError("Loading file failed") return res def _save_yaml_file(self, file, val): """ Save data to yaml file :param file: Writable object or path to file :type file: FileIO | str | unicode :param val: Value or struct to save :type val: None | int | float | str | unicode | list | dict :raises IOError: Failed to save """ try: save_yaml_file(file, val) except: self.exception("Failed to save to {}".format(file)) raise IOError("Saving file failed") def load_settings(self, path): """ Load 
settings dict :param path: Path to settings file :type path: str | unicode :return: Loaded settings :rtype: dict :raises IOError: If file not found or error accessing file :raises TypeError: Settings file does not contain dict """ res = self.load_file(path) if not isinstance(res, dict): raise TypeError("Expected settings to be dict") return res def save_settings(self, path, settings, readable=False): """ Save settings to file :param path: File path to save :type path: str | unicode :param settings: Settings to save :type settings: dict :param readable: Format file to be human readable (default: False) :type readable: bool :rtype: None :raises IOError: If empty path or error writing file :raises TypeError: Settings is not a dict """ if not isinstance(settings, dict): raise TypeError("Expected settings to be dict") return self.save_file(path, settings, readable) def load_file(self, path): """ Load file :param path: Path to file :type path: str | unicode :return: Loaded settings :rtype: None | str | unicode | int | list | dict :raises IOError: If file not found or error accessing file """ res = None if not path: IOError("No path specified to save") if not os.path.isfile(path): raise IOError("File not found {}".format(path)) try: with io.open(path, "r", encoding="utf-8") as f: if path.endswith(".json"): res = self._load_json_file(f) elif path.endswith(".yaml") or path.endswith(".yml"): res = self._load_yaml_file(f) except IOError: raise except Exception as e: self.exception("Failed reading {}".format(path)) raise IOError(e) return res
the01/python-flotils
flotils/logable.py
Logable.name
python
def name(self): res = type(self).__name__ if self._id: res += ".{}".format(self._id) return res
Get the module name :return: Module name :rtype: str | unicode
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/logable.py#L67-L77
null
class Logable(object): """ Class to facilitate clean logging with class/function/id information """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None """ if settings is None: settings = {} super(Logable, self).__init__() self._id = settings.get('id', None) """ instance id """ self._logger = logging.getLogger(self.name) @property def _get_function_name(self): """ Get function name of calling method :return: The name of the calling function (expected to be called in self.error/debug/..) :rtype: str | unicode """ fname = inspect.getframeinfo(inspect.stack()[2][0]).function if fname == "<module>": return "" else: return fname def log(self, level, msg, *args, **kargs): self._logger.log( level, msg, extra={'function': self._get_function_name()}, *args, **kargs ) def exception(self, msg, *args, **kwargs): self._logger.exception( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def debug(self, msg, *args, **kwargs): self._logger.debug( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def info(self, msg, *args, **kwargs): self._logger.info( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def warning(self, msg, *args, **kwargs): self._logger.warning( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def error(self, msg, *args, **kwargs): self._logger.error( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def critical(self, msg, *args, **kwargs): self._logger.critical( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def fatal(self, msg, *args, **kwargs): self._logger.fatal( msg, extra={'function': self._get_function_name()}, *args, **kwargs )
the01/python-flotils
flotils/logable.py
Logable._get_function_name
python
def _get_function_name(self): fname = inspect.getframeinfo(inspect.stack()[2][0]).function if fname == "<module>": return "" else: return fname
Get function name of calling method :return: The name of the calling function (expected to be called in self.error/debug/..) :rtype: str | unicode
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/logable.py#L79-L91
null
class Logable(object): """ Class to facilitate clean logging with class/function/id information """ def __init__(self, settings=None): """ Initialize object :param settings: Settings for instance (default: None) :type settings: dict | None :rtype: None """ if settings is None: settings = {} super(Logable, self).__init__() self._id = settings.get('id', None) """ instance id """ self._logger = logging.getLogger(self.name) @property def name(self): """ Get the module name :return: Module name :rtype: str | unicode """ res = type(self).__name__ if self._id: res += ".{}".format(self._id) return res def log(self, level, msg, *args, **kargs): self._logger.log( level, msg, extra={'function': self._get_function_name()}, *args, **kargs ) def exception(self, msg, *args, **kwargs): self._logger.exception( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def debug(self, msg, *args, **kwargs): self._logger.debug( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def info(self, msg, *args, **kwargs): self._logger.info( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def warning(self, msg, *args, **kwargs): self._logger.warning( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def error(self, msg, *args, **kwargs): self._logger.error( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def critical(self, msg, *args, **kwargs): self._logger.critical( msg, extra={'function': self._get_function_name()}, *args, **kwargs ) def fatal(self, msg, *args, **kwargs): self._logger.fatal( msg, extra={'function': self._get_function_name()}, *args, **kwargs )
the01/python-flotils
flotils/runable.py
StartStopable.start
python
def start(self, blocking=False):
    """Start the interface.

    :param blocking: Block until stop() is called (default: False)
    :type blocking: bool
    :rtype: None
    """
    super(StartStopable, self).start()
    self._is_running = True
    # Optionally block the caller, sleeping in short intervals so that
    # a stop() from another thread is noticed promptly.
    try:
        while blocking and self._is_running:
            time.sleep(self._start_block_timeout)
    except IOError as err:
        # Interrupted system call (EINTR, "[Errno 4]") is expected when a
        # signal wakes the sleep; anything else is a genuine error.
        if not str(err).lower().startswith("[errno 4]"):
            raise
Start the interface :param blocking: Should the call block until stop() is called (default: False) :type blocking: bool :rtype: None
train
https://github.com/the01/python-flotils/blob/5954712776bb590107e5b2f4362d010bf74f77a1/flotils/runable.py#L118-L135
[ "def start(self):\n \"\"\"\n Start the interface\n\n :rtype: None\n \"\"\"\n pass\n" ]
class StartStopable(Startable, Stopable): """ Abstract interface to add a start/stop method (e.g. for threading) """ __metaclass__ = ABCMeta def __init__(self, settings=None): """ Initialize object :param settings: Settings to be passed for init (default: None) :type settings: dict | None :rtype: None """ if settings is None: settings = {} super(StartStopable, self).__init__(settings) self._is_running = False """ Indicate whether this object is currently running :type _running: bool """ self._start_block_timeout = settings.get('start_blocking_timeout', 1.0) """ Timeout used to sleep in blocking loop (in seconds) """ @property def is_running(self): """ Is this class currently running :return: Running state :rtype: bool """ return self._is_running def stop(self): """ Stop the interface :rtype: None """ self._is_running = False super(StartStopable, self).stop()
amol-/depot
depot/manager.py
DepotManager.set_default
python
def set_default(cls, name):
    """Replaces the current application default depot.

    Only an already-configured depot may become the default; otherwise a
    ``RuntimeError`` is raised.
    """
    if name in cls._depots:
        cls._default_depot = name
    else:
        raise RuntimeError('%s depot has not been configured' % (name,))
Replaces the current application default depot
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L25-L29
null
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod def get(cls, name=None): """Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet. """ if name is None: name = cls._default_depot name = cls.resolve_alias(name) # resolve alias return cls._depots.get(name) @classmethod def get_file(cls, path): """Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``. """ depot_name, file_id = path.split('/', 1) depot = cls.get(depot_name) return depot.get(file_id) @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. 
""" mw = cls.get_middleware() return mw.url_for(path) @classmethod def configure(cls, name, config, prefix='depot.'): """Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specified the required backend for files storage. Additional options depend on the choosen backend. """ if name in cls._depots: raise RuntimeError('Depot %s has already been configured' % (name,)) if cls._default_depot is None: cls._default_depot = name cls._depots[name] = cls.from_config(config, prefix) return cls._depots[name] @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod def make_middleware(cls, app, **options): """Creates the application WSGI middleware in charge of serving local files. 
A Depot middleware is required if your application wants to serve files from storages that don't directly provide and HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage` """ from depot.middleware import DepotMiddleware mw = DepotMiddleware(app, **options) cls.set_middleware(mw) return mw @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod def from_config(cls, config, prefix='depot.'): """Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time. """ config = config or {} # Get preferred storage backend backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage') # Get all options prefixlen = len(prefix) options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix)) # Backend is already passed as a positional argument options.pop('backend', None) return cls._new(backend, **options) @classmethod def _clear(cls): """This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware. """ cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
amol-/depot
depot/manager.py
DepotManager.get
python
def get(cls, name=None):
    """Gets the application wide depot instance.

    Falls back to the default depot when *name* is omitted, resolves any
    alias, and might return ``None`` if :meth:`configure` has not been
    called yet.
    """
    target = cls._default_depot if name is None else name
    target = cls.resolve_alias(target)  # resolve alias
    return cls._depots.get(target)
Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L51-L62
[ "def resolve_alias(cls, name):\n while name and name not in cls._depots:\n name = cls._aliases.get(name)\n return name\n" ]
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) cls._default_depot = name @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod @classmethod def get_file(cls, path): """Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``. """ depot_name, file_id = path.split('/', 1) depot = cls.get(depot_name) return depot.get(file_id) @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. 
""" mw = cls.get_middleware() return mw.url_for(path) @classmethod def configure(cls, name, config, prefix='depot.'): """Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specified the required backend for files storage. Additional options depend on the choosen backend. """ if name in cls._depots: raise RuntimeError('Depot %s has already been configured' % (name,)) if cls._default_depot is None: cls._default_depot = name cls._depots[name] = cls.from_config(config, prefix) return cls._depots[name] @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod def make_middleware(cls, app, **options): """Creates the application WSGI middleware in charge of serving local files. 
A Depot middleware is required if your application wants to serve files from storages that don't directly provide and HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage` """ from depot.middleware import DepotMiddleware mw = DepotMiddleware(app, **options) cls.set_middleware(mw) return mw @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod def from_config(cls, config, prefix='depot.'): """Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time. """ config = config or {} # Get preferred storage backend backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage') # Get all options prefixlen = len(prefix) options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix)) # Backend is already passed as a positional argument options.pop('backend', None) return cls._new(backend, **options) @classmethod def _clear(cls): """This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware. """ cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
amol-/depot
depot/manager.py
DepotManager.get_file
python
def get_file(cls, path):
    """Retrieves a file by storage name and fileid in the form of a path.

    *path* is expected to be ``storage_name/fileid``; a ``ValueError``
    propagates when no ``/`` separator is present.
    """
    # Split only on the first '/' so file ids may themselves contain slashes.
    storage_name, file_id = path.split('/', 1)
    return cls.get(storage_name).get(file_id)
Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L65-L72
[ "def get(cls, name=None):\n \"\"\"Gets the application wide depot instance.\n\n Might return ``None`` if :meth:`configure` has not been\n called yet.\n\n \"\"\"\n if name is None:\n name = cls._default_depot\n\n name = cls.resolve_alias(name) # resolve alias\n return cls._depots.get(name)\n" ]
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) cls._default_depot = name @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod def get(cls, name=None): """Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet. """ if name is None: name = cls._default_depot name = cls.resolve_alias(name) # resolve alias return cls._depots.get(name) @classmethod @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. 
""" mw = cls.get_middleware() return mw.url_for(path) @classmethod def configure(cls, name, config, prefix='depot.'): """Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specified the required backend for files storage. Additional options depend on the choosen backend. """ if name in cls._depots: raise RuntimeError('Depot %s has already been configured' % (name,)) if cls._default_depot is None: cls._default_depot = name cls._depots[name] = cls.from_config(config, prefix) return cls._depots[name] @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod def make_middleware(cls, app, **options): """Creates the application WSGI middleware in charge of serving local files. 
A Depot middleware is required if your application wants to serve files from storages that don't directly provide and HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage` """ from depot.middleware import DepotMiddleware mw = DepotMiddleware(app, **options) cls.set_middleware(mw) return mw @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod def from_config(cls, config, prefix='depot.'): """Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time. """ config = config or {} # Get preferred storage backend backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage') # Get all options prefixlen = len(prefix) options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix)) # Backend is already passed as a positional argument options.pop('backend', None) return cls._new(backend, **options) @classmethod def _clear(cls): """This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware. """ cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
amol-/depot
depot/manager.py
DepotManager.configure
python
def configure(cls, name, config, prefix='depot.'):
    """Configures an application depot.

    Builds a depot named *name* from a settings dictionary (options read
    under the given ``prefix``, minimally ``<prefix>backend``) and
    registers it.  The first depot configured becomes the application
    default.  Raises ``RuntimeError`` if *name* was already configured.
    """
    if name in cls._depots:
        raise RuntimeError('Depot %s has already been configured' % (name,))

    # First configured depot implicitly becomes the default one.
    if cls._default_depot is None:
        cls._default_depot = name

    depot = cls.from_config(config, prefix)
    cls._depots[name] = depot
    return depot
Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specifies the required backend for files storage. Additional options depend on the chosen backend.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L84-L103
[ "def from_config(cls, config, prefix='depot.'):\n \"\"\"Creates a new depot from a settings dictionary.\n\n Behaves like the :meth:`configure` method but instead of configuring the application\n depot it creates a new one each time.\n \"\"\"\n config = config or {}\n\n # Get preferred storage backend\n backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage')\n\n # Get all options\n prefixlen = len(prefix)\n options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix))\n\n # Backend is already passed as a positional argument\n options.pop('backend', None)\n return cls._new(backend, **options)\n" ]
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) cls._default_depot = name @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod def get(cls, name=None): """Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet. """ if name is None: name = cls._default_depot name = cls.resolve_alias(name) # resolve alias return cls._depots.get(name) @classmethod def get_file(cls, path): """Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``. 
""" depot_name, file_id = path.split('/', 1) depot = cls.get(depot_name) return depot.get(file_id) @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. """ mw = cls.get_middleware() return mw.url_for(path) @classmethod @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod def make_middleware(cls, app, **options): """Creates the application WSGI middleware in charge of serving local files. A Depot middleware is required if your application wants to serve files from storages that don't directly provide and HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage` """ from depot.middleware import DepotMiddleware mw = DepotMiddleware(app, **options) cls.set_middleware(mw) return mw @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod def from_config(cls, config, prefix='depot.'): """Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time. 
""" config = config or {} # Get preferred storage backend backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage') # Get all options prefixlen = len(prefix) options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix)) # Backend is already passed as a positional argument options.pop('backend', None) return cls._new(backend, **options) @classmethod def _clear(cls): """This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware. """ cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
amol-/depot
depot/manager.py
DepotManager.make_middleware
python
def make_middleware(cls, app, **options):
    """Creates the application WSGI middleware in charge of serving local files.

    A Depot middleware is required when files live in storages that do not
    expose an HTTP interface themselves (e.g. local or GridFS storage).
    The middleware wraps *app* and is registered as the application-wide
    middleware before being returned.
    """
    from depot.middleware import DepotMiddleware
    middleware = DepotMiddleware(app, **options)
    cls.set_middleware(middleware)
    return middleware
Creates the application WSGI middleware in charge of serving local files. A Depot middleware is required if your application wants to serve files from storages that don't directly provide an HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage`
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L122-L133
[ "def set_middleware(cls, mw):\n if cls._middleware is not None:\n raise RuntimeError('There is already a WSGI middleware registered')\n cls._middleware = mw\n" ]
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) cls._default_depot = name @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod def get(cls, name=None): """Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet. """ if name is None: name = cls._default_depot name = cls.resolve_alias(name) # resolve alias return cls._depots.get(name) @classmethod def get_file(cls, path): """Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``. 
""" depot_name, file_id = path.split('/', 1) depot = cls.get(depot_name) return depot.get(file_id) @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. """ mw = cls.get_middleware() return mw.url_for(path) @classmethod def configure(cls, name, config, prefix='depot.'): """Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specified the required backend for files storage. Additional options depend on the choosen backend. """ if name in cls._depots: raise RuntimeError('Depot %s has already been configured' % (name,)) if cls._default_depot is None: cls._default_depot = name cls._depots[name] = cls.from_config(config, prefix) return cls._depots[name] @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod def from_config(cls, config, prefix='depot.'): """Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time. 
""" config = config or {} # Get preferred storage backend backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage') # Get all options prefixlen = len(prefix) options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix)) # Backend is already passed as a positional argument options.pop('backend', None) return cls._new(backend, **options) @classmethod def _clear(cls): """This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware. """ cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
amol-/depot
depot/manager.py
DepotManager.from_config
python
def from_config(cls, config, prefix='depot.'):
    """Creates a new depot from a settings dictionary.

    Behaves like :meth:`configure` but returns a fresh depot each time
    instead of registering it.  Options are read from keys starting with
    *prefix*; ``<prefix>backend`` selects the storage class (defaulting to
    the local file storage) and the remaining options are forwarded to it.
    """
    settings = config or {}

    # Preferred storage backend, with a local-filesystem fallback.
    backend = settings.get(prefix + 'backend', 'depot.io.local.LocalFileStorage')

    # Strip the prefix from every matching key to obtain backend options.
    cut = len(prefix)
    options = {key[cut:]: value
               for key, value in settings.items()
               if key.startswith(prefix)}

    # 'backend' is passed positionally above, not as a keyword option.
    options.pop('backend', None)
    return cls._new(backend, **options)
Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L143-L160
null
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) cls._default_depot = name @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod def get(cls, name=None): """Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet. """ if name is None: name = cls._default_depot name = cls.resolve_alias(name) # resolve alias return cls._depots.get(name) @classmethod def get_file(cls, path): """Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``. 
""" depot_name, file_id = path.split('/', 1) depot = cls.get(depot_name) return depot.get(file_id) @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. """ mw = cls.get_middleware() return mw.url_for(path) @classmethod def configure(cls, name, config, prefix='depot.'): """Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specified the required backend for files storage. Additional options depend on the choosen backend. """ if name in cls._depots: raise RuntimeError('Depot %s has already been configured' % (name,)) if cls._default_depot is None: cls._default_depot = name cls._depots[name] = cls.from_config(config, prefix) return cls._depots[name] @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod def make_middleware(cls, app, **options): """Creates the application WSGI middleware in charge of serving local files. 
A Depot middleware is required if your application wants to serve files from storages that don't directly provide and HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage` """ from depot.middleware import DepotMiddleware mw = DepotMiddleware(app, **options) cls.set_middleware(mw) return mw @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod @classmethod def _clear(cls): """This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware. """ cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
amol-/depot
depot/manager.py
DepotManager._clear
python
def _clear(cls): cls._default_depot = None cls._depots = {} cls._middleware = None cls._aliases = {}
This is only for testing pourposes, resets the DepotManager status This is to simplify writing test fixtures, resets the DepotManager global status and removes the informations related to the current configured depots and middleware.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/manager.py#L163-L173
null
class DepotManager(object): """Takes care of managing the whole Depot environment for the application. DepotManager tracks the created depots, the current default depot, and the WSGI middleware in charge of serving files for local depots. While this is used to create the default depot used by the application it can also create additional depots using the :meth:`new` method. In case you need to migrate your application to a different storage while keeping compatibility with previously stored file simply change the default depot through :meth:`set_default` all previously stored file will continue to work on the old depot while new files will be uploaded to the new default one. """ _default_depot = None _depots = {} _middleware = None _aliases = {} @classmethod def set_default(cls, name): """Replaces the current application default depot""" if name not in cls._depots: raise RuntimeError('%s depot has not been configured' % (name,)) cls._default_depot = name @classmethod def get_default(cls): """Retrieves the current application default depot""" if cls._default_depot is None: raise RuntimeError('Not depots have been configured!') return cls._default_depot @classmethod def set_middleware(cls, mw): if cls._middleware is not None: raise RuntimeError('There is already a WSGI middleware registered') cls._middleware = mw @classmethod def get_middleware(cls): if cls._middleware is None: raise RuntimeError('No WSGI middleware currently registered') return cls._middleware @classmethod def get(cls, name=None): """Gets the application wide depot instance. Might return ``None`` if :meth:`configure` has not been called yet. """ if name is None: name = cls._default_depot name = cls.resolve_alias(name) # resolve alias return cls._depots.get(name) @classmethod def get_file(cls, path): """Retrieves a file by storage name and fileid in the form of a path Path is expected to be ``storage_name/fileid``. 
""" depot_name, file_id = path.split('/', 1) depot = cls.get(depot_name) return depot.get(file_id) @classmethod def url_for(cls, path): """Given path of a file uploaded on depot returns the url that serves it Path is expected to be ``storage_name/fileid``. """ mw = cls.get_middleware() return mw.url_for(path) @classmethod def configure(cls, name, config, prefix='depot.'): """Configures an application depot. This configures the application wide depot from a settings dictionary. The settings dictionary is usually loaded from an application configuration file where all the depot options are specified with a given ``prefix``. The default ``prefix`` is *depot.*, the minimum required setting is ``depot.backend`` which specified the required backend for files storage. Additional options depend on the choosen backend. """ if name in cls._depots: raise RuntimeError('Depot %s has already been configured' % (name,)) if cls._default_depot is None: cls._default_depot = name cls._depots[name] = cls.from_config(config, prefix) return cls._depots[name] @classmethod def alias(cls, alias, name): if name not in cls._depots: raise ValueError('You can only alias an existing storage, %s not found' % (name, )) if alias in cls._depots: raise ValueError('Cannot use an existing storage name as an alias, will break existing files.') cls._aliases[alias] = name @classmethod def resolve_alias(cls, name): while name and name not in cls._depots: name = cls._aliases.get(name) return name @classmethod def make_middleware(cls, app, **options): """Creates the application WSGI middleware in charge of serving local files. 
A Depot middleware is required if your application wants to serve files from storages that don't directly provide and HTTP interface like :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage` """ from depot.middleware import DepotMiddleware mw = DepotMiddleware(app, **options) cls.set_middleware(mw) return mw @classmethod def _new(cls, backend, **options): module, classname = backend.rsplit('.', 1) backend = importlib.import_module(module) class_ = getattr(backend, classname) return class_(**options) @classmethod def from_config(cls, config, prefix='depot.'): """Creates a new depot from a settings dictionary. Behaves like the :meth:`configure` method but instead of configuring the application depot it creates a new one each time. """ config = config or {} # Get preferred storage backend backend = config.get(prefix + 'backend', 'depot.io.local.LocalFileStorage') # Get all options prefixlen = len(prefix) options = dict((k[prefixlen:], config[k]) for k in config.keys() if k.startswith(prefix)) # Backend is already passed as a positional argument options.pop('backend', None) return cls._new(backend, **options) @classmethod
amol-/depot
examples/turbogears/depotexample/websetup/schema.py
setup_schema
python
def setup_schema(command, conf, vars): # Load the models # <websetup.websetup.schema.before.model.import> from depotexample import model # <websetup.websetup.schema.after.model.import> # <websetup.websetup.schema.before.metadata.create_all> print("Creating tables") model.metadata.create_all(bind=config['tg.app_globals'].sa_engine) # <websetup.websetup.schema.after.metadata.create_all> transaction.commit() print('Initializing Migrations') import alembic.config, alembic.command alembic_cfg = alembic.config.Config() alembic_cfg.set_main_option("script_location", "migration") alembic_cfg.set_main_option("sqlalchemy.url", config['sqlalchemy.url']) alembic.command.stamp(alembic_cfg, "head")
Place any commands to setup depotexample here
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/websetup/schema.py#L9-L28
null
# -*- coding: utf-8 -*- """Setup the depotexample application""" from __future__ import print_function import logging from tg import config import transaction def setup_schema(command, conf, vars): """Place any commands to setup depotexample here""" # Load the models # <websetup.websetup.schema.before.model.import> from depotexample import model # <websetup.websetup.schema.after.model.import> # <websetup.websetup.schema.before.metadata.create_all> print("Creating tables") model.metadata.create_all(bind=config['tg.app_globals'].sa_engine) # <websetup.websetup.schema.after.metadata.create_all> transaction.commit() print('Initializing Migrations') import alembic.config, alembic.command alembic_cfg = alembic.config.Config() alembic_cfg.set_main_option("script_location", "migration") alembic_cfg.set_main_option("sqlalchemy.url", config['sqlalchemy.url']) alembic.command.stamp(alembic_cfg, "head")
amol-/depot
examples/turbogears/depotexample/model/auth.py
User.permissions
python
def permissions(self): perms = set() for g in self.groups: perms = perms | set(g.permissions) return perms
Return a set with all permissions granted to the user.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/model/auth.py#L88-L93
null
class User(DeclarativeBase): """ User definition. This is the user definition used by :mod:`repoze.who`, which requires at least the ``user_name`` column. """ __tablename__ = 'tg_user' user_id = Column(Integer, autoincrement=True, primary_key=True) user_name = Column(Unicode(16), unique=True, nullable=False) email_address = Column(Unicode(255), unique=True, nullable=False) display_name = Column(Unicode(255)) _password = Column('password', Unicode(128)) created = Column(DateTime, default=datetime.now) def __repr__(self): return '<User: name=%s, email=%s, display=%s>' % ( repr(self.user_name), repr(self.email_address), repr(self.display_name)) def __unicode__(self): return self.display_name or self.user_name @property @classmethod def by_email_address(cls, email): """Return the user object whose email address is ``email``.""" return DBSession.query(cls).filter_by(email_address=email).first() @classmethod def by_user_name(cls, username): """Return the user object whose user name is ``username``.""" return DBSession.query(cls).filter_by(user_name=username).first() @classmethod def _hash_password(cls, password): salt = sha256() salt.update(os.urandom(60)) salt = salt.hexdigest() hash = sha256() # Make sure password is a str because we cannot hash unicode objects hash.update((password + salt).encode('utf-8')) hash = hash.hexdigest() password = salt + hash return password def _set_password(self, password): """Hash ``password`` on the fly and store its hashed version.""" self._password = self._hash_password(password) def _get_password(self): """Return the hashed version of the password.""" return self._password password = synonym('_password', descriptor=property(_get_password, _set_password)) def validate_password(self, password): """ Check the password against existing credentials. :param password: the password that was provided by the user to try and authenticate. This is the clear text version that we will need to match against the hashed one in the database. 
:type password: unicode object. :return: Whether the password is valid. :rtype: bool """ hash = sha256() hash.update((password + self.password[:64]).encode('utf-8')) return self.password[64:] == hash.hexdigest()
amol-/depot
examples/turbogears/depotexample/model/auth.py
User.by_email_address
python
def by_email_address(cls, email): return DBSession.query(cls).filter_by(email_address=email).first()
Return the user object whose email address is ``email``.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/model/auth.py#L96-L98
null
class User(DeclarativeBase): """ User definition. This is the user definition used by :mod:`repoze.who`, which requires at least the ``user_name`` column. """ __tablename__ = 'tg_user' user_id = Column(Integer, autoincrement=True, primary_key=True) user_name = Column(Unicode(16), unique=True, nullable=False) email_address = Column(Unicode(255), unique=True, nullable=False) display_name = Column(Unicode(255)) _password = Column('password', Unicode(128)) created = Column(DateTime, default=datetime.now) def __repr__(self): return '<User: name=%s, email=%s, display=%s>' % ( repr(self.user_name), repr(self.email_address), repr(self.display_name)) def __unicode__(self): return self.display_name or self.user_name @property def permissions(self): """Return a set with all permissions granted to the user.""" perms = set() for g in self.groups: perms = perms | set(g.permissions) return perms @classmethod @classmethod def by_user_name(cls, username): """Return the user object whose user name is ``username``.""" return DBSession.query(cls).filter_by(user_name=username).first() @classmethod def _hash_password(cls, password): salt = sha256() salt.update(os.urandom(60)) salt = salt.hexdigest() hash = sha256() # Make sure password is a str because we cannot hash unicode objects hash.update((password + salt).encode('utf-8')) hash = hash.hexdigest() password = salt + hash return password def _set_password(self, password): """Hash ``password`` on the fly and store its hashed version.""" self._password = self._hash_password(password) def _get_password(self): """Return the hashed version of the password.""" return self._password password = synonym('_password', descriptor=property(_get_password, _set_password)) def validate_password(self, password): """ Check the password against existing credentials. :param password: the password that was provided by the user to try and authenticate. This is the clear text version that we will need to match against the hashed one in the database. 
:type password: unicode object. :return: Whether the password is valid. :rtype: bool """ hash = sha256() hash.update((password + self.password[:64]).encode('utf-8')) return self.password[64:] == hash.hexdigest()
amol-/depot
examples/turbogears/depotexample/model/auth.py
User.by_user_name
python
def by_user_name(cls, username): return DBSession.query(cls).filter_by(user_name=username).first()
Return the user object whose user name is ``username``.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/model/auth.py#L101-L103
null
class User(DeclarativeBase): """ User definition. This is the user definition used by :mod:`repoze.who`, which requires at least the ``user_name`` column. """ __tablename__ = 'tg_user' user_id = Column(Integer, autoincrement=True, primary_key=True) user_name = Column(Unicode(16), unique=True, nullable=False) email_address = Column(Unicode(255), unique=True, nullable=False) display_name = Column(Unicode(255)) _password = Column('password', Unicode(128)) created = Column(DateTime, default=datetime.now) def __repr__(self): return '<User: name=%s, email=%s, display=%s>' % ( repr(self.user_name), repr(self.email_address), repr(self.display_name)) def __unicode__(self): return self.display_name or self.user_name @property def permissions(self): """Return a set with all permissions granted to the user.""" perms = set() for g in self.groups: perms = perms | set(g.permissions) return perms @classmethod def by_email_address(cls, email): """Return the user object whose email address is ``email``.""" return DBSession.query(cls).filter_by(email_address=email).first() @classmethod @classmethod def _hash_password(cls, password): salt = sha256() salt.update(os.urandom(60)) salt = salt.hexdigest() hash = sha256() # Make sure password is a str because we cannot hash unicode objects hash.update((password + salt).encode('utf-8')) hash = hash.hexdigest() password = salt + hash return password def _set_password(self, password): """Hash ``password`` on the fly and store its hashed version.""" self._password = self._hash_password(password) def _get_password(self): """Return the hashed version of the password.""" return self._password password = synonym('_password', descriptor=property(_get_password, _set_password)) def validate_password(self, password): """ Check the password against existing credentials. :param password: the password that was provided by the user to try and authenticate. This is the clear text version that we will need to match against the hashed one in the database. 
:type password: unicode object. :return: Whether the password is valid. :rtype: bool """ hash = sha256() hash.update((password + self.password[:64]).encode('utf-8')) return self.password[64:] == hash.hexdigest()
amol-/depot
examples/turbogears/depotexample/model/auth.py
User.validate_password
python
def validate_password(self, password): hash = sha256() hash.update((password + self.password[:64]).encode('utf-8')) return self.password[64:] == hash.hexdigest()
Check the password against existing credentials. :param password: the password that was provided by the user to try and authenticate. This is the clear text version that we will need to match against the hashed one in the database. :type password: unicode object. :return: Whether the password is valid. :rtype: bool
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/model/auth.py#L132-L146
null
class User(DeclarativeBase): """ User definition. This is the user definition used by :mod:`repoze.who`, which requires at least the ``user_name`` column. """ __tablename__ = 'tg_user' user_id = Column(Integer, autoincrement=True, primary_key=True) user_name = Column(Unicode(16), unique=True, nullable=False) email_address = Column(Unicode(255), unique=True, nullable=False) display_name = Column(Unicode(255)) _password = Column('password', Unicode(128)) created = Column(DateTime, default=datetime.now) def __repr__(self): return '<User: name=%s, email=%s, display=%s>' % ( repr(self.user_name), repr(self.email_address), repr(self.display_name)) def __unicode__(self): return self.display_name or self.user_name @property def permissions(self): """Return a set with all permissions granted to the user.""" perms = set() for g in self.groups: perms = perms | set(g.permissions) return perms @classmethod def by_email_address(cls, email): """Return the user object whose email address is ``email``.""" return DBSession.query(cls).filter_by(email_address=email).first() @classmethod def by_user_name(cls, username): """Return the user object whose user name is ``username``.""" return DBSession.query(cls).filter_by(user_name=username).first() @classmethod def _hash_password(cls, password): salt = sha256() salt.update(os.urandom(60)) salt = salt.hexdigest() hash = sha256() # Make sure password is a str because we cannot hash unicode objects hash.update((password + salt).encode('utf-8')) hash = hash.hexdigest() password = salt + hash return password def _set_password(self, password): """Hash ``password`` on the fly and store its hashed version.""" self._password = self._hash_password(password) def _get_password(self): """Return the hashed version of the password.""" return self._password password = synonym('_password', descriptor=property(_get_password, _set_password))
amol-/depot
examples/turbogears/depotexample/controllers/error.py
ErrorController.document
python
def document(self, *args, **kwargs): resp = request.environ.get('pylons.original_response') default_message = ("<p>We're sorry but we weren't able to process " " this request.</p>") values = dict(prefix=request.environ.get('SCRIPT_NAME', ''), code=request.params.get('code', resp.status_int), message=request.params.get('message', default_message)) return values
Render the error document
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/controllers/error.py#L22-L30
null
class ErrorController(object): """ Generates error documents as and when they are required. The ErrorDocuments middleware forwards to ErrorController when error related status codes are returned from the application. This behaviour can be altered by changing the parameters to the ErrorDocuments middleware in your config/middleware.py file. """ @expose('depotexample.templates.error')
amol-/depot
examples/turbogears/depotexample/config/middleware.py
make_app
python
def make_app(global_conf, full_stack=True, **app_conf): app = make_base_app(global_conf, full_stack=True, **app_conf) # Wrap your base TurboGears 2 application with custom middleware here from depot.manager import DepotManager app = DepotManager.make_middleware(app) return app
Set depotexample up with the settings found in the PasteDeploy configuration file used. :param global_conf: The global settings for depotexample (those defined under the ``[DEFAULT]`` section). :type global_conf: dict :param full_stack: Should the whole TG2 stack be set up? :type full_stack: str or bool :return: The depotexample application with all the relevant middleware loaded. This is the PasteDeploy factory for the depotexample application. ``app_conf`` contains all the application-specific settings (those defined under ``[app:main]``.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/config/middleware.py#L15-L41
[ "def make_middleware(cls, app, **options):\n \"\"\"Creates the application WSGI middleware in charge of serving local files.\n\n A Depot middleware is required if your application wants to serve files from\n storages that don't directly provide and HTTP interface like\n :class:`depot.io.local.LocalFileStorage` and :class:`depot.io.gridfs.GridFSStorage`\n\n \"\"\"\n from depot.middleware import DepotMiddleware\n mw = DepotMiddleware(app, **options)\n cls.set_middleware(mw)\n return mw\n" ]
# -*- coding: utf-8 -*- """WSGI middleware initialization for the depotexample application.""" from depotexample.config.app_cfg import base_config from depotexample.config.environment import load_environment __all__ = ['make_app'] # Use base_config to setup the necessary PasteDeploy application factory. # make_base_app will wrap the TG2 app with all the middleware it needs. make_base_app = base_config.setup_tg_wsgi_app(load_environment) def make_app(global_conf, full_stack=True, **app_conf): """ Set depotexample up with the settings found in the PasteDeploy configuration file used. :param global_conf: The global settings for depotexample (those defined under the ``[DEFAULT]`` section). :type global_conf: dict :param full_stack: Should the whole TG2 stack be set up? :type full_stack: str or bool :return: The depotexample application with all the relevant middleware loaded. This is the PasteDeploy factory for the depotexample application. ``app_conf`` contains all the application-specific settings (those defined under ``[app:main]``. """ app = make_base_app(global_conf, full_stack=True, **app_conf) # Wrap your base TurboGears 2 application with custom middleware here from depot.manager import DepotManager app = DepotManager.make_middleware(app) return app
amol-/depot
depot/fields/upload.py
UploadedFile.process_content
python
def process_content(self, content, filename=None, content_type=None): file_path, file_id = self.store_content(content, filename, content_type) self['file_id'] = file_id self['path'] = file_path saved_file = self.file self['filename'] = saved_file.filename self['content_type'] = saved_file.content_type self['uploaded_at'] = saved_file.last_modified.strftime('%Y-%m-%d %H:%M:%S') self['_public_url'] = saved_file.public_url
Standard implementation of :meth:`.DepotFileInfo.process_content` This is the standard depot implementation of files upload, it will store the file on the default depot and will provide the standard attributes. Subclasses will need to call this method to ensure the standard set of attributes is provided.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/fields/upload.py#L26-L45
null
class UploadedFile(DepotFileInfo): """Simple :class:`depot.fields.interfaces.DepotFileInfo` implementation that stores files. Takes a file as content and uploads it to the depot while saving around most file information. Pay attention that if the file gets replaced through depot manually the ``UploadedFile`` will continue to have the old data. Also provides support for encoding/decoding using JSON for storage inside databases as a plain string. Default attributes provided for all ``UploadedFile`` include: - filename - This is the name of the uploaded file - file_id - This is the ID of the uploaded file - path - This is a depot_name/file_id path which can be used with :meth:`DepotManager.get_file` to retrieve the file - content_type - This is the content type of the uploaded file - uploaded_at - This is the upload date in YYYY-MM-DD HH:MM:SS format - url - Public url of the uploaded file - file - The :class:`depot.io.interfaces.StoredFile` instance of the stored file """ def store_content(self, content, filename=None, content_type=None): file_id = self.depot.create(content, filename, content_type) file_path = '%s/%s' % (self.depot_name, file_id) self.files.append(file_path) return file_path, file_id def encode(self): return json.dumps(self) @classmethod def decode(cls, data): return cls(json.loads(data)) @property def url(self): public_url = self['_public_url'] if public_url: return public_url return DepotManager.get_middleware().url_for(self['path']) @property def depot(self): return DepotManager.get(self.depot_name) @property def file(self): return self.depot.get(self.file_id)
amol-/depot
examples/turbogears/depotexample/websetup/bootstrap.py
bootstrap
python
def bootstrap(command, conf, vars): # <websetup.bootstrap.before.auth from sqlalchemy.exc import IntegrityError try: u = model.User() u.user_name = 'manager' u.display_name = 'Example manager' u.email_address = 'manager@somedomain.com' u.password = 'managepass' model.DBSession.add(u) g = model.Group() g.group_name = 'managers' g.display_name = 'Managers Group' g.users.append(u) model.DBSession.add(g) p = model.Permission() p.permission_name = 'manage' p.description = 'This permission give an administrative right to the bearer' p.groups.append(g) model.DBSession.add(p) u1 = model.User() u1.user_name = 'editor' u1.display_name = 'Example editor' u1.email_address = 'editor@somedomain.com' u1.password = 'editpass' model.DBSession.add(u1) model.DBSession.flush() transaction.commit() except IntegrityError: print('Warning, there was a problem adding your auth data, it may have already been added:') import traceback print(traceback.format_exc()) transaction.abort() print('Continuing with bootstrapping...')
Place any commands to setup depotexample here
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/websetup/bootstrap.py#L10-L53
null
# -*- coding: utf-8 -*- """Setup the depotexample application""" from __future__ import print_function import logging from tg import config from depotexample import model import transaction def bootstrap(command, conf, vars): """Place any commands to setup depotexample here""" # <websetup.bootstrap.before.auth from sqlalchemy.exc import IntegrityError try: u = model.User() u.user_name = 'manager' u.display_name = 'Example manager' u.email_address = 'manager@somedomain.com' u.password = 'managepass' model.DBSession.add(u) g = model.Group() g.group_name = 'managers' g.display_name = 'Managers Group' g.users.append(u) model.DBSession.add(g) p = model.Permission() p.permission_name = 'manage' p.description = 'This permission give an administrative right to the bearer' p.groups.append(g) model.DBSession.add(p) u1 = model.User() u1.user_name = 'editor' u1.display_name = 'Example editor' u1.email_address = 'editor@somedomain.com' u1.password = 'editpass' model.DBSession.add(u1) model.DBSession.flush() transaction.commit() except IntegrityError: print('Warning, there was a problem adding your auth data, it may have already been added:') import traceback print(traceback.format_exc()) transaction.abort() print('Continuing with bootstrapping...') # <websetup.bootstrap.after.auth>
amol-/depot
depot/io/utils.py
file_from_content
python
def file_from_content(content): f = content if isinstance(content, cgi.FieldStorage): f = content.file elif isinstance(content, FileIntent): f = content._fileobj elif isinstance(content, byte_string): f = SpooledTemporaryFile(INMEMORY_FILESIZE) f.write(content) f.seek(0) return f
Provides a real file object from file content Converts ``FileStorage``, ``FileIntent`` and ``bytes`` to an actual file.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/io/utils.py#L16-L31
null
import cgi import mimetypes import os from datetime import datetime from tempfile import SpooledTemporaryFile from depot._compat import byte_string INMEMORY_FILESIZE = 1024*1024 def timestamp(): return datetime.utcnow().strftime('%Y-%m-%d %H:%M:%S') class FileIntent(object): """Represents the intention to upload a file Whenever a file can be stored by depot, a FileIntent can be passed instead of the file itself. This permits to easily upload objects that are not files or to add missing information to the uploaded files. """ def __init__(self, fileobj, filename, content_type): self._fileobj = fileobj self._filename = filename self._content_type = content_type @property def fileinfo(self): return self._fileobj, self._filename, self._content_type class _FileInfo(object): """Utility class to get a file object, filename and content_type from available data. This is used by most Storage to convert data to a file like object, to find a meaningful filename for the file and to detect the content type of the file. 
""" DEFAULT_CONTENT_TYPE = 'application/octet-stream' DEFAULT_NAME = 'unnamed' def __init__(self, fileobj, filename=None, content_type=None): self._content, self._filename, self._content_type = self._resolve(fileobj, filename, content_type) def get_info(self, existing=None): if self._filename is None and existing is not None: if callable(existing): existing = existing() return self._content, existing.filename, existing.content_type return ( self._content, self._filename or self.DEFAULT_NAME, self._content_type or self.DEFAULT_CONTENT_TYPE ) def _resolve(self, fileobj, filename, content_type): if isinstance(fileobj, FileIntent): return fileobj.fileinfo content = self._get_content_from_file_obj(fileobj) filename = filename or self._get_filename_from_fileob(fileobj) content_type = content_type or self._get_content_type_from_fileobj(fileobj) if content_type is None and filename is not None: content_type = mimetypes.guess_type(filename, strict=False)[0] return content, filename, content_type def _get_content_from_file_obj(self, fileobj): if isinstance(fileobj, cgi.FieldStorage): return fileobj.file return fileobj def _get_filename_from_fileob(self, fileobj): if getattr(fileobj, 'filename', None) is not None: return fileobj.filename elif getattr(fileobj, 'name', None) is not None: return os.path.basename(fileobj.name) def _get_content_type_from_fileobj(self, fileobj): if getattr(fileobj, 'content_type', None) is not None: return fileobj.content_type elif getattr(fileobj, 'type', None) is not None: return fileobj.type
amol-/depot
depot/io/interfaces.py
FileStorage.fileinfo
python
def fileinfo(fileobj, filename=None, content_type=None, existing=None): return _FileInfo(fileobj, filename, content_type).get_info(existing)
Tries to extract from the given input the actual file object, filename and content_type This is used by the create and replace methods to correctly deduce their parameters from the available information when possible.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/depot/io/interfaces.py#L136-L142
[ "def get_info(self, existing=None):\n if self._filename is None and existing is not None:\n if callable(existing):\n existing = existing()\n return self._content, existing.filename, existing.content_type\n\n return (\n self._content,\n self._filename or self.DEFAULT_NAME,\n self._content_type or self.DEFAULT_CONTENT_TYPE\n )\n" ]
class FileStorage(with_metaclass(ABCMeta, object)): """Interface for storage providers. The FileStorage base class declares a standard interface for storing and retrieving files in an underlying storage system. Each storage system implementation is required to provide this interface to correctly work with filedepot. """ @staticmethod def fileid(file_or_id): """Gets the ID of a given :class:`StoredFile` If the given parameter is already a StoredFile id it will directly return it. """ return getattr(file_or_id, 'file_id', file_or_id) @staticmethod @abstractmethod def get(self, file_or_id): # pragma: no cover """Opens the file given by its unique id. This operation is guaranteed to return a :class:`StoredFile` instance or should raise ``IOError`` if the file is not found. """ return @abstractmethod def create(self, content, filename=None, content_type=None): # pragma: no cover """Saves a new file and returns the ID of the newly created file. ``content`` parameter can either be ``bytes``, another ``file object`` or a :class:`cgi.FieldStorage`. When ``filename`` and ``content_type`` parameters are not provided they are deducted from the content itself. """ return @abstractmethod def replace(self, file_or_id, content, filename=None, content_type=None): # pragma: no cover """Replaces an existing file, an ``IOError`` is raised if the file didn't already exist. Given a :class:`StoredFile` or its ID it will replace the current content with the provided ``content`` value. If ``filename`` and ``content_type`` are provided or can be deducted by the ``content`` itself they will also replace the previous values, otherwise the current values are kept. """ return @abstractmethod def delete(self, file_or_id): # pragma: no cover """Deletes a file. 
If the file didn't exist it will just do nothing.""" return @abstractmethod def exists(self, file_or_id): # pragma: no cover """Returns if a file or its ID still exist.""" return @abstractmethod def list(self): """Returns a list of file IDs that exist in the Storage. Depending on the implementation there is the possibility that this returns more IDs than there have been created. Therefore this method is NOT guaranteed to be RELIABLE.""" return []
amol-/depot
examples/turbogears/depotexample/controllers/root.py
RootController.login
python
def login(self, came_from=lurl('/')): login_counter = request.environ.get('repoze.who.logins', 0) if login_counter > 0: flash(_('Wrong credentials'), 'warning') return dict(page='login', login_counter=str(login_counter), came_from=came_from)
Start the user login.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/controllers/root.py#L97-L103
null
class RootController(BaseController): """ The root controller for the depotexample application. All the other controllers and WSGI applications should be mounted on this controller. For example:: panel = ControlPanelController() another_app = AnotherWSGIApplication() Keep in mind that WSGI applications shouldn't be mounted directly: They must be wrapped around with :class:`tg.controllers.WSGIAppController`. """ secc = SecureController() admin = AdminController(model, DBSession, config_type=CustomAdminConfig) error = ErrorController() def _before(self, *args, **kw): tmpl_context.project_name = "depotexample" @expose('depotexample.templates.index') def index(self): """Handle the front-page.""" return dict(page='index') @expose('depotexample.templates.about') def about(self): """Handle the 'about' page.""" return dict(page='about') @expose('depotexample.templates.environ') def environ(self): """This method showcases TG's access to the wsgi environment.""" return dict(page='environ', environment=request.environ) @expose('depotexample.templates.data') @expose('json') def data(self, **kw): """This method showcases how you can use the same controller for a data page and a display page""" return dict(page='data', params=kw) @expose('depotexample.templates.index') @require(predicates.has_permission('manage', msg=l_('Only for managers'))) def manage_permission_only(self, **kw): """Illustrate how a page for managers only works.""" return dict(page='managers stuff') @expose('depotexample.templates.index') @require(predicates.is_user('editor', msg=l_('Only for the editor'))) def editor_user_only(self, **kw): """Illustrate how a page exclusive for the editor works.""" return dict(page='editor stuff') @expose('depotexample.templates.login') @expose() def post_login(self, came_from=lurl('/')): """ Redirect the user to the initially requested page on successful authentication or redirect her back to the login page if login failed. 
""" if not request.identity: login_counter = request.environ.get('repoze.who.logins', 0) + 1 redirect('/login', params=dict(came_from=came_from, __logins=login_counter)) userid = request.identity['repoze.who.userid'] flash(_('Welcome back, %s!') % userid) redirect(came_from) @expose() def post_logout(self, came_from=lurl('/')): """ Redirect the user to the initially requested page on logout and say goodbye as well. """ flash(_('We hope to see you soon!')) redirect(came_from)
amol-/depot
examples/turbogears/depotexample/controllers/root.py
RootController.post_login
python
def post_login(self, came_from=lurl('/')): if not request.identity: login_counter = request.environ.get('repoze.who.logins', 0) + 1 redirect('/login', params=dict(came_from=came_from, __logins=login_counter)) userid = request.identity['repoze.who.userid'] flash(_('Welcome back, %s!') % userid) redirect(came_from)
Redirect the user to the initially requested page on successful authentication or redirect her back to the login page if login failed.
train
https://github.com/amol-/depot/blob/82104d2ae54f8ef55f05fb5a3f148cdc9f928959/examples/turbogears/depotexample/controllers/root.py#L106-L118
null
class RootController(BaseController): """ The root controller for the depotexample application. All the other controllers and WSGI applications should be mounted on this controller. For example:: panel = ControlPanelController() another_app = AnotherWSGIApplication() Keep in mind that WSGI applications shouldn't be mounted directly: They must be wrapped around with :class:`tg.controllers.WSGIAppController`. """ secc = SecureController() admin = AdminController(model, DBSession, config_type=CustomAdminConfig) error = ErrorController() def _before(self, *args, **kw): tmpl_context.project_name = "depotexample" @expose('depotexample.templates.index') def index(self): """Handle the front-page.""" return dict(page='index') @expose('depotexample.templates.about') def about(self): """Handle the 'about' page.""" return dict(page='about') @expose('depotexample.templates.environ') def environ(self): """This method showcases TG's access to the wsgi environment.""" return dict(page='environ', environment=request.environ) @expose('depotexample.templates.data') @expose('json') def data(self, **kw): """This method showcases how you can use the same controller for a data page and a display page""" return dict(page='data', params=kw) @expose('depotexample.templates.index') @require(predicates.has_permission('manage', msg=l_('Only for managers'))) def manage_permission_only(self, **kw): """Illustrate how a page for managers only works.""" return dict(page='managers stuff') @expose('depotexample.templates.index') @require(predicates.is_user('editor', msg=l_('Only for the editor'))) def editor_user_only(self, **kw): """Illustrate how a page exclusive for the editor works.""" return dict(page='editor stuff') @expose('depotexample.templates.login') def login(self, came_from=lurl('/')): """Start the user login.""" login_counter = request.environ.get('repoze.who.logins', 0) if login_counter > 0: flash(_('Wrong credentials'), 'warning') return dict(page='login', 
login_counter=str(login_counter), came_from=came_from) @expose() @expose() def post_logout(self, came_from=lurl('/')): """ Redirect the user to the initially requested page on logout and say goodbye as well. """ flash(_('We hope to see you soon!')) redirect(came_from)
cgarciae/phi
phi/builder.py
Builder.RegisterMethod
python
def RegisterMethod(cls, *args, **kwargs): unpack_error = True try: f, library_path = args unpack_error = False cls._RegisterMethod(f, library_path, **kwargs) except: if not unpack_error: raise def register_decorator(f): library_path, = args cls._RegisterMethod(f, library_path, **kwargs) return f return register_decorator
**RegisterMethod** RegisterMethod(f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True) `classmethod` for registering functions as methods of this class. **Arguments** * **f** : the particular function being registered as a method * **library_path** : library from where `f` comes from, unless you pass an empty string, put a period `"."` at the end of the library name. * `alias=None` : alias for the name/method being registered * `original_name=None` : name of the original function, used for documentation purposes. * `doc=None` : complete documentation of the method being registered * `wrapped=None` : if you are registering a function which wraps around another function, pass this other function through `wrapped` to get better documentation, this is specially useful is you register a bunch of functions in a for loop. Please include an `explanation` to tell how the actual function differs from the wrapped one. * `explanation=""` : especify any additional information for the documentation of the method being registered, you can use any of the following format tags within this string and they will be replace latter on: `{original_name}`, `{name}`, `{fn_docs}`, `{library_path}`, `{builder_class}`. * `method_type=identity` : by default its applied but does nothing, you might also want to register functions as `property`, `classmethod`, `staticmethod` * `explain=True` : decide whether or not to show any kind of explanation, its useful to set it to `False` if you are using a `Register*` decorator and will only use the function as a registered method. A main feature of `phi` is that it enables you to integrate your library or even an existing library with the DSL. You can achieve three levels of integration 1. Passing your functions to the DSL. This a very general machanism -since you could actually do everything with python lamdas- but in practice functions often receive multiple parameters. 2. 
Creating partials with the `Then*` method family. Using this you could integrate any function, but it will add a lot of noise if you use heavily on it. 3. Registering functions as methods of a `Builder` derived class. This produces the most readable code and its the approach you should take if you want to create a Phi-based library or a helper class. While point 3 is the most desirable it has a cost: you need to create your own `phi.builder.Builder`-derived class. This is because SHOULD NOT register functions to existing builders e.g. the `phi.builder.Builder` or [PythonBuilder](https://cgarciae.github.io/phi/builder.m.html#phi.python_builder.PythonBuilder) provided by phi because that would pollute the `P` object. Instead you should create a custom class that derives from `phi.builder.Builder`, [PythonBuilder](https://cgarciae.github.io/phi/builder.m.html#phi.python_builder.PythonBuilder) or another custom builder depending on your needs and register your functions to that class. **Examples** Say you have a function on a library called `"my_lib"` def some_fun(obj, arg1, arg2): # code You could use it with the dsl like this from phi import P, Then P.Pipe( input, ... Then(some_fun, arg1, arg2) ... ) assuming the first parameter `obj` is being piped down. However if you do this very often or you are creating a library, you are better off creating a custom class derived from `Builder` or `PythonBuilder` from phi import Builder #or PythonBuilder class MyBuilder(Builder): # or PythonBuilder pass and registering your function as a method. The first way you could do this is by creating a wrapper function for `some_fun` and registering it as a method def some_fun_wrapper(self, arg1, arg2): return self.Then(some_fun, arg1, arg2) MyBuilder.RegisterMethod(some_fun_wrapper, "my_lib.", wrapped=some_fun) Here we basically created a shortcut for the original expression `Then(some_fun, arg1, arg2)`. 
You could also do this using a decorator @MyBuilder.RegisterMethod("my_lib.", wrapped=some_fun) def some_fun_wrapper(self, arg1, arg2): return self.Then(some_fun, arg1, arg2) However, this is such a common task that we've created the method `Register` to avoid you from having to create the wrapper. With it you could register the function `some_fun` directly as a method like this MyBuilder.Register(some_fun, "my_lib.") or by using a decorator over the original function definition @MyBuilder.Register("my_lib.") def some_fun(obj, arg1, arg2): # code Once done you've done any of the previous approaches you can create a custom global object e.g. `M` and use it instead of/along with `P` M = MyBuilder(lambda x: x) M.Pipe( input, ... M.some_fun(arg1, args) ... ) **Argument position** `phi.builder.Builder.Register` internally uses `phi.builder.Builder.Then`, this is only useful if the object being piped is intended to be passed as the first argument of the function being registered, if this is not the case you could use `phi.builder.Builder.Register2`, `phi.builder.Builder.Register3`, ..., `phi.builder.Builder.Register5` or `phi.builder.Builder.RegisterAt` to set an arbitrary position, these functions will internally use `phi.builder.Builder.Then2`, `phi.builder.Builder.Then3`, ..., `phi.builder.Builder.Then5` or `phi.builder.Builder.ThenAt` respectively. 
**Wrapping functions** Sometimes you have an existing function that you would like to modify slightly so it plays nicely with the DSL, what you normally do is create a function that wraps around it and passes the arguments to it in a way that is convenient import some_lib @MyBuilder.Register("some_lib.") def some_fun(a, n): return some_lib.some_fun(a, n - 1) # forward the args, n slightly modified When you do this -as a side effect- you loose the original documentation, to avoid this you can use the Registers `wrapped` argument along with the `explanation` argument to clarity the situation import some_lib some_fun_explanation = "However, it differs in that `n` is automatically subtracted `1`" @MyBuilder.Register("some_lib.", wrapped=some_lib.some_fun, explanation=some_fun_explanation) def some_fun(a, n): return some_lib.some_fun(a, n - 1) # forward the args, n slightly modified Now the documentation for `MyBuilder.some_fun` will be a little bit nicer since it includes the original documentation from `some_lib.some_fun`. This behaviour is specially useful if you are wrapping an entire 3rd party library, you usually automate the process iterating over all the funcitions in a for loop. The `phi.builder.Builder.PatchAt` method lets you register and entire module using a few lines of code, however, something you have to do thing more manually and do the iteration yourself. **See Also** * `phi.builder.Builder.PatchAt` * `phi.builder.Builder.RegisterAt`
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/builder.py#L71-L205
[ " def _RegisterMethod(cls, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation=\"\", method_type=utils.identity, explain=True):\n if wrapped:\n f = functools.wraps(wrapped)(f)\n\n fn_signature = utils.get_method_sig(f)\n fn_docs = inspect.getdoc(f)\n name = alias if alias else f.__name__\n original_name = f.__name__ if wrapped else original_name if original_name else name\n\n f.__name__ = str(name)\n f.__doc__ = doc if doc else (\"\"\"\nTHIS METHOD IS AUTOMATICALLY GENERATED\n\n {builder_class}.{name}(*args, **kwargs)\n\nIt accepts the same arguments as `{library_path}{original_name}`. \"\"\" + explanation + \"\"\"\n\n**{library_path}{original_name}**\n\n {fn_docs}\n\n \"\"\").format(original_name=original_name, name=name, fn_docs=fn_docs, library_path=library_path, builder_class=cls.__name__) if explain else fn_docs\n\n if name in cls.__core__:\n raise Exception(\"Can't add method '{0}' because its on __core__\".format(name))\n\n f = method_type(f)\n setattr(cls, name, f)\n" ]
class Builder(dsl.Expression): """ All the public methods of the `Builder`, `Expression` and `Expression` classes start with a capital letter on purpose to avoid name chashes with methods that you might register.""" @classmethod def _RegisterMethod(cls, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True): if wrapped: f = functools.wraps(wrapped)(f) fn_signature = utils.get_method_sig(f) fn_docs = inspect.getdoc(f) name = alias if alias else f.__name__ original_name = f.__name__ if wrapped else original_name if original_name else name f.__name__ = str(name) f.__doc__ = doc if doc else (""" THIS METHOD IS AUTOMATICALLY GENERATED {builder_class}.{name}(*args, **kwargs) It accepts the same arguments as `{library_path}{original_name}`. """ + explanation + """ **{library_path}{original_name}** {fn_docs} """).format(original_name=original_name, name=name, fn_docs=fn_docs, library_path=library_path, builder_class=cls.__name__) if explain else fn_docs if name in cls.__core__: raise Exception("Can't add method '{0}' because its on __core__".format(name)) f = method_type(f) setattr(cls, name, f) @classmethod @classmethod def _RegisterAt(cls, n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True, _return_type=None): _wrapped = wrapped if wrapped else f try: @functools.wraps(f) def method(self, *args, **kwargs): kwargs['_return_type'] = _return_type return self.ThenAt(n, f, *args, **kwargs) except: raise all_args, previous_args, last_arg = _make_args_strs(n) explanation = """ However, the 1st argument is omitted, a partial with the rest of the arguments is returned which expects the 1st argument such that {library_path}{original_name}("""+ all_args +"""*args, **kwargs) is equivalent to {builder_class}.{name}("""+ previous_args +"""*args, **kwargs)("""+ last_arg +""") """ + explanation if explain else "" 
cls.RegisterMethod(method, library_path, alias=alias, original_name=original_name, doc=doc, wrapped=wrapped, explanation=explanation, method_type=method_type, explain=explain) @classmethod def RegisterAt(cls, *args, **kwargs): """ **RegisterAt** RegisterAt(n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True, _return_type=None) Most of the time you don't want to register an method as such, that is, you don't care about the `self` builder object, instead you want to register a function that transforms the value being piped down the DSL. For this you can use `RegisterAt` so e.g. def some_fun(obj, arg1, arg2): # code @MyBuilder.RegisterMethod("my_lib.") def some_fun_wrapper(self, arg1, arg2): return self.ThenAt(1, some_fun, arg1, arg2) can be written directly as @MyBuilder.RegisterAt(1, "my_lib.") def some_fun(obj, arg1, arg2): # code For this case you can just use `Register` which is a shortcut for `RegisterAt(1, ...)` @MyBuilder.Register("my_lib.") def some_fun(obj, arg1, arg2): # code **Also See** * `phi.builder.Builder.RegisterMethod` """ unpack_error = True try: n, f, library_path = args unpack_error = False cls._RegisterAt(n, f, library_path, **kwargs) except: if not unpack_error: raise def register_decorator(f): n, library_path = args cls._RegisterAt(n, f, library_path, **kwargs) return f return register_decorator @classmethod def Register0(cls, *args, **kwargs): """ `Register0(...)` is a shortcut for `RegisterAt(0, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(0, *args, **kwargs) @classmethod def Register(cls, *args, **kwargs): """ `Register(...)` is a shortcut for `RegisterAt(1, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(1, *args, **kwargs) @classmethod def Register2(cls, *args, **kwargs): """ `Register2(...)` is a shortcut for 
`RegisterAt(2, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(2, *args, **kwargs) @classmethod def Register3(cls, *args, **kwargs): """ `Register3(...)` is a shortcut for `RegisterAt(3, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(3, *args, **kwargs) @classmethod def Register4(cls, *args, **kwargs): """ `Register4(...)` is a shortcut for `RegisterAt(4, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(4, *args, **kwargs) @classmethod def Register5(cls, *args, **kwargs): """ `Register5(...)` is a shortcut for `RegisterAt(5, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(5, *args, **kwargs) @classmethod def PatchAt(cls, n, module, method_wrapper=None, module_alias=None, method_name_modifier=utils.identity, blacklist_predicate=_False, whitelist_predicate=_True, return_type_predicate=_None, getmembers_predicate=inspect.isfunction, admit_private=False, explanation=""): """ This classmethod lets you easily patch all of functions/callables from a module or class as methods a Builder class. **Arguments** * **n** : the position the the object being piped will take in the arguments when the function being patched is applied. See `RegisterMethod` and `ThenAt`. * **module** : a module or class from which the functions/methods/callables will be taken. * `module_alias = None` : an optional alias for the module used for documentation purposes. * `method_name_modifier = lambda f_name: None` : a function that can modify the name of the method will take. If `None` the name of the function will be used. * `blacklist_predicate = lambda f_name: name[0] != "_"` : A predicate that determines which functions are banned given their name. By default it excludes all function whose name start with `'_'`. 
`blacklist_predicate` can also be of type list, in which case all names contained in this list will be banned. * `whitelist_predicate = lambda f_name: True` : A predicate that determines which functions are admitted given their name. By default it include any function. `whitelist_predicate` can also be of type list, in which case only names contained in this list will be admitted. You can use both `blacklist_predicate` and `whitelist_predicate` at the same time. * `return_type_predicate = lambda f_name: None` : a predicate that determines the `_return_type` of the Builder. By default it will always return `None`. See `phi.builder.Builder.ThenAt`. * `getmembers_predicate = inspect.isfunction` : a predicate that determines what type of elements/members will be fetched by the `inspect` module, defaults to [inspect.isfunction](https://docs.python.org/2/library/inspect.html#inspect.isfunction). See [getmembers](https://docs.python.org/2/library/inspect.html#inspect.getmembers). **Examples** Lets patch ALL the main functions from numpy into a custom builder! from phi import PythonBuilder #or Builder import numpy as np class NumpyBuilder(PythonBuilder): #or Builder "A Builder for numpy functions!" pass NumpyBuilder.PatchAt(1, np) N = NumpyBuilder(lambda x: x) Thats it! Although a serious patch would involve filtering out functions that don't take arrays. Another common task would be to use `NumpyBuilder.PatchAt(2, ...)` (`PatchAt(n, ..)` in general) when convenient to send the object being pipe to the relevant argument of the function. The previous is usually done with and a combination of `whitelist_predicate`s and `blacklist_predicate`s on `PatchAt(1, ...)` and `PatchAt(2, ...)` to filter or include the approriate functions on each kind of patch. 
Given the previous code we could now do import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = N.Pipe( x, N .dot(y) .add(x) .transpose() .sum(axis=1) ) Which is strictly equivalent to import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = np.dot(x, y) z = np.add(z, x) z = np.transpose(z) z = np.sum(z, axis=1) The thing to notice is that with the `NumpyBuilder` we avoid the repetitive and needless passing and reassigment of the `z` variable, this removes a lot of noise from our code. """ _rtp = return_type_predicate return_type_predicate = (lambda x: _rtp) if inspect.isclass(_rtp) and issubclass(_rtp, Builder) else _rtp module_name = module_alias if module_alias else module.__name__ + '.' patch_members = _get_patch_members(module, blacklist_predicate=blacklist_predicate, whitelist_predicate=whitelist_predicate, getmembers_predicate=getmembers_predicate, admit_private=admit_private) for name, f in patch_members: wrapped = None if method_wrapper: g = method_wrapper(f) wrapped = f else: g = f cls.RegisterAt(n, g, module_name, wrapped=wrapped, _return_type=return_type_predicate(name), alias=method_name_modifier(name), explanation=explanation)
cgarciae/phi
phi/builder.py
Builder.RegisterAt
python
def RegisterAt(cls, *args, **kwargs): unpack_error = True try: n, f, library_path = args unpack_error = False cls._RegisterAt(n, f, library_path, **kwargs) except: if not unpack_error: raise def register_decorator(f): n, library_path = args cls._RegisterAt(n, f, library_path, **kwargs) return f return register_decorator
**RegisterAt** RegisterAt(n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True, _return_type=None) Most of the time you don't want to register an method as such, that is, you don't care about the `self` builder object, instead you want to register a function that transforms the value being piped down the DSL. For this you can use `RegisterAt` so e.g. def some_fun(obj, arg1, arg2): # code @MyBuilder.RegisterMethod("my_lib.") def some_fun_wrapper(self, arg1, arg2): return self.ThenAt(1, some_fun, arg1, arg2) can be written directly as @MyBuilder.RegisterAt(1, "my_lib.") def some_fun(obj, arg1, arg2): # code For this case you can just use `Register` which is a shortcut for `RegisterAt(1, ...)` @MyBuilder.Register("my_lib.") def some_fun(obj, arg1, arg2): # code **Also See** * `phi.builder.Builder.RegisterMethod`
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/builder.py#L238-L285
[ " def _RegisterAt(cls, n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation=\"\", method_type=utils.identity, explain=True, _return_type=None):\n\n _wrapped = wrapped if wrapped else f\n\n try:\n @functools.wraps(f)\n def method(self, *args, **kwargs):\n\n kwargs['_return_type'] = _return_type\n return self.ThenAt(n, f, *args, **kwargs)\n except:\n raise\n\n all_args, previous_args, last_arg = _make_args_strs(n)\n\n explanation = \"\"\"\nHowever, the 1st argument is omitted, a partial with the rest of the arguments is returned which expects the 1st argument such that\n\n {library_path}{original_name}(\"\"\"+ all_args +\"\"\"*args, **kwargs)\n\nis equivalent to\n\n {builder_class}.{name}(\"\"\"+ previous_args +\"\"\"*args, **kwargs)(\"\"\"+ last_arg +\"\"\")\n\n \"\"\" + explanation if explain else \"\"\n\n cls.RegisterMethod(method, library_path, alias=alias, original_name=original_name, doc=doc, wrapped=wrapped, explanation=explanation, method_type=method_type, explain=explain)\n" ]
class Builder(dsl.Expression): """ All the public methods of the `Builder`, `Expression` and `Expression` classes start with a capital letter on purpose to avoid name chashes with methods that you might register.""" @classmethod def _RegisterMethod(cls, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True): if wrapped: f = functools.wraps(wrapped)(f) fn_signature = utils.get_method_sig(f) fn_docs = inspect.getdoc(f) name = alias if alias else f.__name__ original_name = f.__name__ if wrapped else original_name if original_name else name f.__name__ = str(name) f.__doc__ = doc if doc else (""" THIS METHOD IS AUTOMATICALLY GENERATED {builder_class}.{name}(*args, **kwargs) It accepts the same arguments as `{library_path}{original_name}`. """ + explanation + """ **{library_path}{original_name}** {fn_docs} """).format(original_name=original_name, name=name, fn_docs=fn_docs, library_path=library_path, builder_class=cls.__name__) if explain else fn_docs if name in cls.__core__: raise Exception("Can't add method '{0}' because its on __core__".format(name)) f = method_type(f) setattr(cls, name, f) @classmethod def RegisterMethod(cls, *args, **kwargs): """ **RegisterMethod** RegisterMethod(f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True) `classmethod` for registering functions as methods of this class. **Arguments** * **f** : the particular function being registered as a method * **library_path** : library from where `f` comes from, unless you pass an empty string, put a period `"."` at the end of the library name. * `alias=None` : alias for the name/method being registered * `original_name=None` : name of the original function, used for documentation purposes. 
* `doc=None` : complete documentation of the method being registered * `wrapped=None` : if you are registering a function which wraps around another function, pass this other function through `wrapped` to get better documentation, this is specially useful is you register a bunch of functions in a for loop. Please include an `explanation` to tell how the actual function differs from the wrapped one. * `explanation=""` : especify any additional information for the documentation of the method being registered, you can use any of the following format tags within this string and they will be replace latter on: `{original_name}`, `{name}`, `{fn_docs}`, `{library_path}`, `{builder_class}`. * `method_type=identity` : by default its applied but does nothing, you might also want to register functions as `property`, `classmethod`, `staticmethod` * `explain=True` : decide whether or not to show any kind of explanation, its useful to set it to `False` if you are using a `Register*` decorator and will only use the function as a registered method. A main feature of `phi` is that it enables you to integrate your library or even an existing library with the DSL. You can achieve three levels of integration 1. Passing your functions to the DSL. This a very general machanism -since you could actually do everything with python lamdas- but in practice functions often receive multiple parameters. 2. Creating partials with the `Then*` method family. Using this you could integrate any function, but it will add a lot of noise if you use heavily on it. 3. Registering functions as methods of a `Builder` derived class. This produces the most readable code and its the approach you should take if you want to create a Phi-based library or a helper class. While point 3 is the most desirable it has a cost: you need to create your own `phi.builder.Builder`-derived class. This is because SHOULD NOT register functions to existing builders e.g. 
the `phi.builder.Builder` or [PythonBuilder](https://cgarciae.github.io/phi/builder.m.html#phi.python_builder.PythonBuilder) provided by phi because that would pollute the `P` object. Instead you should create a custom class that derives from `phi.builder.Builder`, [PythonBuilder](https://cgarciae.github.io/phi/builder.m.html#phi.python_builder.PythonBuilder) or another custom builder depending on your needs and register your functions to that class. **Examples** Say you have a function on a library called `"my_lib"` def some_fun(obj, arg1, arg2): # code You could use it with the dsl like this from phi import P, Then P.Pipe( input, ... Then(some_fun, arg1, arg2) ... ) assuming the first parameter `obj` is being piped down. However if you do this very often or you are creating a library, you are better off creating a custom class derived from `Builder` or `PythonBuilder` from phi import Builder #or PythonBuilder class MyBuilder(Builder): # or PythonBuilder pass and registering your function as a method. The first way you could do this is by creating a wrapper function for `some_fun` and registering it as a method def some_fun_wrapper(self, arg1, arg2): return self.Then(some_fun, arg1, arg2) MyBuilder.RegisterMethod(some_fun_wrapper, "my_lib.", wrapped=some_fun) Here we basically created a shortcut for the original expression `Then(some_fun, arg1, arg2)`. You could also do this using a decorator @MyBuilder.RegisterMethod("my_lib.", wrapped=some_fun) def some_fun_wrapper(self, arg1, arg2): return self.Then(some_fun, arg1, arg2) However, this is such a common task that we've created the method `Register` to avoid you from having to create the wrapper. 
With it you could register the function `some_fun` directly as a method like this MyBuilder.Register(some_fun, "my_lib.") or by using a decorator over the original function definition @MyBuilder.Register("my_lib.") def some_fun(obj, arg1, arg2): # code Once done you've done any of the previous approaches you can create a custom global object e.g. `M` and use it instead of/along with `P` M = MyBuilder(lambda x: x) M.Pipe( input, ... M.some_fun(arg1, args) ... ) **Argument position** `phi.builder.Builder.Register` internally uses `phi.builder.Builder.Then`, this is only useful if the object being piped is intended to be passed as the first argument of the function being registered, if this is not the case you could use `phi.builder.Builder.Register2`, `phi.builder.Builder.Register3`, ..., `phi.builder.Builder.Register5` or `phi.builder.Builder.RegisterAt` to set an arbitrary position, these functions will internally use `phi.builder.Builder.Then2`, `phi.builder.Builder.Then3`, ..., `phi.builder.Builder.Then5` or `phi.builder.Builder.ThenAt` respectively. 
**Wrapping functions** Sometimes you have an existing function that you would like to modify slightly so it plays nicely with the DSL, what you normally do is create a function that wraps around it and passes the arguments to it in a way that is convenient import some_lib @MyBuilder.Register("some_lib.") def some_fun(a, n): return some_lib.some_fun(a, n - 1) # forward the args, n slightly modified When you do this -as a side effect- you loose the original documentation, to avoid this you can use the Registers `wrapped` argument along with the `explanation` argument to clarity the situation import some_lib some_fun_explanation = "However, it differs in that `n` is automatically subtracted `1`" @MyBuilder.Register("some_lib.", wrapped=some_lib.some_fun, explanation=some_fun_explanation) def some_fun(a, n): return some_lib.some_fun(a, n - 1) # forward the args, n slightly modified Now the documentation for `MyBuilder.some_fun` will be a little bit nicer since it includes the original documentation from `some_lib.some_fun`. This behaviour is specially useful if you are wrapping an entire 3rd party library, you usually automate the process iterating over all the funcitions in a for loop. The `phi.builder.Builder.PatchAt` method lets you register and entire module using a few lines of code, however, something you have to do thing more manually and do the iteration yourself. 
**See Also** * `phi.builder.Builder.PatchAt` * `phi.builder.Builder.RegisterAt` """ unpack_error = True try: f, library_path = args unpack_error = False cls._RegisterMethod(f, library_path, **kwargs) except: if not unpack_error: raise def register_decorator(f): library_path, = args cls._RegisterMethod(f, library_path, **kwargs) return f return register_decorator @classmethod def _RegisterAt(cls, n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True, _return_type=None): _wrapped = wrapped if wrapped else f try: @functools.wraps(f) def method(self, *args, **kwargs): kwargs['_return_type'] = _return_type return self.ThenAt(n, f, *args, **kwargs) except: raise all_args, previous_args, last_arg = _make_args_strs(n) explanation = """ However, the 1st argument is omitted, a partial with the rest of the arguments is returned which expects the 1st argument such that {library_path}{original_name}("""+ all_args +"""*args, **kwargs) is equivalent to {builder_class}.{name}("""+ previous_args +"""*args, **kwargs)("""+ last_arg +""") """ + explanation if explain else "" cls.RegisterMethod(method, library_path, alias=alias, original_name=original_name, doc=doc, wrapped=wrapped, explanation=explanation, method_type=method_type, explain=explain) @classmethod @classmethod def Register0(cls, *args, **kwargs): """ `Register0(...)` is a shortcut for `RegisterAt(0, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(0, *args, **kwargs) @classmethod def Register(cls, *args, **kwargs): """ `Register(...)` is a shortcut for `RegisterAt(1, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(1, *args, **kwargs) @classmethod def Register2(cls, *args, **kwargs): """ `Register2(...)` is a shortcut for `RegisterAt(2, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * 
`phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(2, *args, **kwargs) @classmethod def Register3(cls, *args, **kwargs): """ `Register3(...)` is a shortcut for `RegisterAt(3, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(3, *args, **kwargs) @classmethod def Register4(cls, *args, **kwargs): """ `Register4(...)` is a shortcut for `RegisterAt(4, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(4, *args, **kwargs) @classmethod def Register5(cls, *args, **kwargs): """ `Register5(...)` is a shortcut for `RegisterAt(5, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(5, *args, **kwargs) @classmethod def PatchAt(cls, n, module, method_wrapper=None, module_alias=None, method_name_modifier=utils.identity, blacklist_predicate=_False, whitelist_predicate=_True, return_type_predicate=_None, getmembers_predicate=inspect.isfunction, admit_private=False, explanation=""): """ This classmethod lets you easily patch all of functions/callables from a module or class as methods a Builder class. **Arguments** * **n** : the position the the object being piped will take in the arguments when the function being patched is applied. See `RegisterMethod` and `ThenAt`. * **module** : a module or class from which the functions/methods/callables will be taken. * `module_alias = None` : an optional alias for the module used for documentation purposes. * `method_name_modifier = lambda f_name: None` : a function that can modify the name of the method will take. If `None` the name of the function will be used. * `blacklist_predicate = lambda f_name: name[0] != "_"` : A predicate that determines which functions are banned given their name. By default it excludes all function whose name start with `'_'`. 
`blacklist_predicate` can also be of type list, in which case all names contained in this list will be banned. * `whitelist_predicate = lambda f_name: True` : A predicate that determines which functions are admitted given their name. By default it include any function. `whitelist_predicate` can also be of type list, in which case only names contained in this list will be admitted. You can use both `blacklist_predicate` and `whitelist_predicate` at the same time. * `return_type_predicate = lambda f_name: None` : a predicate that determines the `_return_type` of the Builder. By default it will always return `None`. See `phi.builder.Builder.ThenAt`. * `getmembers_predicate = inspect.isfunction` : a predicate that determines what type of elements/members will be fetched by the `inspect` module, defaults to [inspect.isfunction](https://docs.python.org/2/library/inspect.html#inspect.isfunction). See [getmembers](https://docs.python.org/2/library/inspect.html#inspect.getmembers). **Examples** Lets patch ALL the main functions from numpy into a custom builder! from phi import PythonBuilder #or Builder import numpy as np class NumpyBuilder(PythonBuilder): #or Builder "A Builder for numpy functions!" pass NumpyBuilder.PatchAt(1, np) N = NumpyBuilder(lambda x: x) Thats it! Although a serious patch would involve filtering out functions that don't take arrays. Another common task would be to use `NumpyBuilder.PatchAt(2, ...)` (`PatchAt(n, ..)` in general) when convenient to send the object being pipe to the relevant argument of the function. The previous is usually done with and a combination of `whitelist_predicate`s and `blacklist_predicate`s on `PatchAt(1, ...)` and `PatchAt(2, ...)` to filter or include the approriate functions on each kind of patch. 
Given the previous code we could now do import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = N.Pipe( x, N .dot(y) .add(x) .transpose() .sum(axis=1) ) Which is strictly equivalent to import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = np.dot(x, y) z = np.add(z, x) z = np.transpose(z) z = np.sum(z, axis=1) The thing to notice is that with the `NumpyBuilder` we avoid the repetitive and needless passing and reassigment of the `z` variable, this removes a lot of noise from our code. """ _rtp = return_type_predicate return_type_predicate = (lambda x: _rtp) if inspect.isclass(_rtp) and issubclass(_rtp, Builder) else _rtp module_name = module_alias if module_alias else module.__name__ + '.' patch_members = _get_patch_members(module, blacklist_predicate=blacklist_predicate, whitelist_predicate=whitelist_predicate, getmembers_predicate=getmembers_predicate, admit_private=admit_private) for name, f in patch_members: wrapped = None if method_wrapper: g = method_wrapper(f) wrapped = f else: g = f cls.RegisterAt(n, g, module_name, wrapped=wrapped, _return_type=return_type_predicate(name), alias=method_name_modifier(name), explanation=explanation)
cgarciae/phi
phi/builder.py
Builder.PatchAt
python
def PatchAt(cls, n, module, method_wrapper=None, module_alias=None, method_name_modifier=utils.identity, blacklist_predicate=_False, whitelist_predicate=_True, return_type_predicate=_None, getmembers_predicate=inspect.isfunction, admit_private=False, explanation=""): _rtp = return_type_predicate return_type_predicate = (lambda x: _rtp) if inspect.isclass(_rtp) and issubclass(_rtp, Builder) else _rtp module_name = module_alias if module_alias else module.__name__ + '.' patch_members = _get_patch_members(module, blacklist_predicate=blacklist_predicate, whitelist_predicate=whitelist_predicate, getmembers_predicate=getmembers_predicate, admit_private=admit_private) for name, f in patch_members: wrapped = None if method_wrapper: g = method_wrapper(f) wrapped = f else: g = f cls.RegisterAt(n, g, module_name, wrapped=wrapped, _return_type=return_type_predicate(name), alias=method_name_modifier(name), explanation=explanation)
This classmethod lets you easily patch all of functions/callables from a module or class as methods a Builder class. **Arguments** * **n** : the position the the object being piped will take in the arguments when the function being patched is applied. See `RegisterMethod` and `ThenAt`. * **module** : a module or class from which the functions/methods/callables will be taken. * `module_alias = None` : an optional alias for the module used for documentation purposes. * `method_name_modifier = lambda f_name: None` : a function that can modify the name of the method will take. If `None` the name of the function will be used. * `blacklist_predicate = lambda f_name: name[0] != "_"` : A predicate that determines which functions are banned given their name. By default it excludes all function whose name start with `'_'`. `blacklist_predicate` can also be of type list, in which case all names contained in this list will be banned. * `whitelist_predicate = lambda f_name: True` : A predicate that determines which functions are admitted given their name. By default it include any function. `whitelist_predicate` can also be of type list, in which case only names contained in this list will be admitted. You can use both `blacklist_predicate` and `whitelist_predicate` at the same time. * `return_type_predicate = lambda f_name: None` : a predicate that determines the `_return_type` of the Builder. By default it will always return `None`. See `phi.builder.Builder.ThenAt`. * `getmembers_predicate = inspect.isfunction` : a predicate that determines what type of elements/members will be fetched by the `inspect` module, defaults to [inspect.isfunction](https://docs.python.org/2/library/inspect.html#inspect.isfunction). See [getmembers](https://docs.python.org/2/library/inspect.html#inspect.getmembers). **Examples** Lets patch ALL the main functions from numpy into a custom builder! 
from phi import PythonBuilder #or Builder import numpy as np class NumpyBuilder(PythonBuilder): #or Builder "A Builder for numpy functions!" pass NumpyBuilder.PatchAt(1, np) N = NumpyBuilder(lambda x: x) Thats it! Although a serious patch would involve filtering out functions that don't take arrays. Another common task would be to use `NumpyBuilder.PatchAt(2, ...)` (`PatchAt(n, ..)` in general) when convenient to send the object being pipe to the relevant argument of the function. The previous is usually done with and a combination of `whitelist_predicate`s and `blacklist_predicate`s on `PatchAt(1, ...)` and `PatchAt(2, ...)` to filter or include the approriate functions on each kind of patch. Given the previous code we could now do import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = N.Pipe( x, N .dot(y) .add(x) .transpose() .sum(axis=1) ) Which is strictly equivalent to import numpy as np x = np.array([[1,2],[3,4]]) y = np.array([[5,6],[7,8]]) z = np.dot(x, y) z = np.add(z, x) z = np.transpose(z) z = np.sum(z, axis=1) The thing to notice is that with the `NumpyBuilder` we avoid the repetitive and needless passing and reassigment of the `z` variable, this removes a lot of noise from our code.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/builder.py#L360-L434
[ "def identity(x):\n return x\n", "def _get_patch_members(module, blacklist_predicate=_NoLeadingUnderscore, whitelist_predicate=_True, _return_type=None, getmembers_predicate=inspect.isfunction, admit_private=False):\n\n if type(whitelist_predicate) is list:\n whitelist = whitelist_predicate\n whitelist_predicate = lambda x: x in whitelist\n\n if type(blacklist_predicate) is list:\n blacklist = blacklist_predicate\n blacklist_predicate = lambda x: x in blacklist or '_' == x[0] if not admit_private else False\n\n return [\n (name, f) for (name, f) in inspect.getmembers(module, getmembers_predicate) if whitelist_predicate(name) and not blacklist_predicate(name)\n ]\n", " def RegisterAt(cls, *args, **kwargs):\n \"\"\"\n**RegisterAt**\n\n RegisterAt(n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation=\"\", method_type=utils.identity, explain=True, _return_type=None)\n\nMost of the time you don't want to register an method as such, that is, you don't care about the `self` builder object, instead you want to register a function that transforms the value being piped down the DSL. 
For this you can use `RegisterAt` so e.g.\n\n def some_fun(obj, arg1, arg2):\n # code\n\n @MyBuilder.RegisterMethod(\"my_lib.\")\n def some_fun_wrapper(self, arg1, arg2):\n return self.ThenAt(1, some_fun, arg1, arg2)\n\ncan be written directly as\n\n @MyBuilder.RegisterAt(1, \"my_lib.\")\n def some_fun(obj, arg1, arg2):\n # code\n\nFor this case you can just use `Register` which is a shortcut for `RegisterAt(1, ...)`\n\n @MyBuilder.Register(\"my_lib.\")\n def some_fun(obj, arg1, arg2):\n # code\n\n**Also See**\n\n* `phi.builder.Builder.RegisterMethod`\n \"\"\"\n unpack_error = True\n\n try:\n n, f, library_path = args\n unpack_error = False\n cls._RegisterAt(n, f, library_path, **kwargs)\n\n except:\n if not unpack_error:\n raise\n\n def register_decorator(f):\n n, library_path = args\n cls._RegisterAt(n, f, library_path, **kwargs)\n\n return f\n return register_decorator\n" ]
class Builder(dsl.Expression): """ All the public methods of the `Builder`, `Expression` and `Expression` classes start with a capital letter on purpose to avoid name chashes with methods that you might register.""" @classmethod def _RegisterMethod(cls, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True): if wrapped: f = functools.wraps(wrapped)(f) fn_signature = utils.get_method_sig(f) fn_docs = inspect.getdoc(f) name = alias if alias else f.__name__ original_name = f.__name__ if wrapped else original_name if original_name else name f.__name__ = str(name) f.__doc__ = doc if doc else (""" THIS METHOD IS AUTOMATICALLY GENERATED {builder_class}.{name}(*args, **kwargs) It accepts the same arguments as `{library_path}{original_name}`. """ + explanation + """ **{library_path}{original_name}** {fn_docs} """).format(original_name=original_name, name=name, fn_docs=fn_docs, library_path=library_path, builder_class=cls.__name__) if explain else fn_docs if name in cls.__core__: raise Exception("Can't add method '{0}' because its on __core__".format(name)) f = method_type(f) setattr(cls, name, f) @classmethod def RegisterMethod(cls, *args, **kwargs): """ **RegisterMethod** RegisterMethod(f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True) `classmethod` for registering functions as methods of this class. **Arguments** * **f** : the particular function being registered as a method * **library_path** : library from where `f` comes from, unless you pass an empty string, put a period `"."` at the end of the library name. * `alias=None` : alias for the name/method being registered * `original_name=None` : name of the original function, used for documentation purposes. 
* `doc=None` : complete documentation of the method being registered * `wrapped=None` : if you are registering a function which wraps around another function, pass this other function through `wrapped` to get better documentation, this is specially useful is you register a bunch of functions in a for loop. Please include an `explanation` to tell how the actual function differs from the wrapped one. * `explanation=""` : especify any additional information for the documentation of the method being registered, you can use any of the following format tags within this string and they will be replace latter on: `{original_name}`, `{name}`, `{fn_docs}`, `{library_path}`, `{builder_class}`. * `method_type=identity` : by default its applied but does nothing, you might also want to register functions as `property`, `classmethod`, `staticmethod` * `explain=True` : decide whether or not to show any kind of explanation, its useful to set it to `False` if you are using a `Register*` decorator and will only use the function as a registered method. A main feature of `phi` is that it enables you to integrate your library or even an existing library with the DSL. You can achieve three levels of integration 1. Passing your functions to the DSL. This a very general machanism -since you could actually do everything with python lamdas- but in practice functions often receive multiple parameters. 2. Creating partials with the `Then*` method family. Using this you could integrate any function, but it will add a lot of noise if you use heavily on it. 3. Registering functions as methods of a `Builder` derived class. This produces the most readable code and its the approach you should take if you want to create a Phi-based library or a helper class. While point 3 is the most desirable it has a cost: you need to create your own `phi.builder.Builder`-derived class. This is because SHOULD NOT register functions to existing builders e.g. 
the `phi.builder.Builder` or [PythonBuilder](https://cgarciae.github.io/phi/builder.m.html#phi.python_builder.PythonBuilder) provided by phi because that would pollute the `P` object. Instead you should create a custom class that derives from `phi.builder.Builder`, [PythonBuilder](https://cgarciae.github.io/phi/builder.m.html#phi.python_builder.PythonBuilder) or another custom builder depending on your needs and register your functions to that class. **Examples** Say you have a function on a library called `"my_lib"` def some_fun(obj, arg1, arg2): # code You could use it with the dsl like this from phi import P, Then P.Pipe( input, ... Then(some_fun, arg1, arg2) ... ) assuming the first parameter `obj` is being piped down. However if you do this very often or you are creating a library, you are better off creating a custom class derived from `Builder` or `PythonBuilder` from phi import Builder #or PythonBuilder class MyBuilder(Builder): # or PythonBuilder pass and registering your function as a method. The first way you could do this is by creating a wrapper function for `some_fun` and registering it as a method def some_fun_wrapper(self, arg1, arg2): return self.Then(some_fun, arg1, arg2) MyBuilder.RegisterMethod(some_fun_wrapper, "my_lib.", wrapped=some_fun) Here we basically created a shortcut for the original expression `Then(some_fun, arg1, arg2)`. You could also do this using a decorator @MyBuilder.RegisterMethod("my_lib.", wrapped=some_fun) def some_fun_wrapper(self, arg1, arg2): return self.Then(some_fun, arg1, arg2) However, this is such a common task that we've created the method `Register` to avoid you from having to create the wrapper. 
With it you could register the function `some_fun` directly as a method like this MyBuilder.Register(some_fun, "my_lib.") or by using a decorator over the original function definition @MyBuilder.Register("my_lib.") def some_fun(obj, arg1, arg2): # code Once done you've done any of the previous approaches you can create a custom global object e.g. `M` and use it instead of/along with `P` M = MyBuilder(lambda x: x) M.Pipe( input, ... M.some_fun(arg1, args) ... ) **Argument position** `phi.builder.Builder.Register` internally uses `phi.builder.Builder.Then`, this is only useful if the object being piped is intended to be passed as the first argument of the function being registered, if this is not the case you could use `phi.builder.Builder.Register2`, `phi.builder.Builder.Register3`, ..., `phi.builder.Builder.Register5` or `phi.builder.Builder.RegisterAt` to set an arbitrary position, these functions will internally use `phi.builder.Builder.Then2`, `phi.builder.Builder.Then3`, ..., `phi.builder.Builder.Then5` or `phi.builder.Builder.ThenAt` respectively. 
**Wrapping functions** Sometimes you have an existing function that you would like to modify slightly so it plays nicely with the DSL, what you normally do is create a function that wraps around it and passes the arguments to it in a way that is convenient import some_lib @MyBuilder.Register("some_lib.") def some_fun(a, n): return some_lib.some_fun(a, n - 1) # forward the args, n slightly modified When you do this -as a side effect- you loose the original documentation, to avoid this you can use the Registers `wrapped` argument along with the `explanation` argument to clarity the situation import some_lib some_fun_explanation = "However, it differs in that `n` is automatically subtracted `1`" @MyBuilder.Register("some_lib.", wrapped=some_lib.some_fun, explanation=some_fun_explanation) def some_fun(a, n): return some_lib.some_fun(a, n - 1) # forward the args, n slightly modified Now the documentation for `MyBuilder.some_fun` will be a little bit nicer since it includes the original documentation from `some_lib.some_fun`. This behaviour is specially useful if you are wrapping an entire 3rd party library, you usually automate the process iterating over all the funcitions in a for loop. The `phi.builder.Builder.PatchAt` method lets you register and entire module using a few lines of code, however, something you have to do thing more manually and do the iteration yourself. 
**See Also** * `phi.builder.Builder.PatchAt` * `phi.builder.Builder.RegisterAt` """ unpack_error = True try: f, library_path = args unpack_error = False cls._RegisterMethod(f, library_path, **kwargs) except: if not unpack_error: raise def register_decorator(f): library_path, = args cls._RegisterMethod(f, library_path, **kwargs) return f return register_decorator @classmethod def _RegisterAt(cls, n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True, _return_type=None): _wrapped = wrapped if wrapped else f try: @functools.wraps(f) def method(self, *args, **kwargs): kwargs['_return_type'] = _return_type return self.ThenAt(n, f, *args, **kwargs) except: raise all_args, previous_args, last_arg = _make_args_strs(n) explanation = """ However, the 1st argument is omitted, a partial with the rest of the arguments is returned which expects the 1st argument such that {library_path}{original_name}("""+ all_args +"""*args, **kwargs) is equivalent to {builder_class}.{name}("""+ previous_args +"""*args, **kwargs)("""+ last_arg +""") """ + explanation if explain else "" cls.RegisterMethod(method, library_path, alias=alias, original_name=original_name, doc=doc, wrapped=wrapped, explanation=explanation, method_type=method_type, explain=explain) @classmethod def RegisterAt(cls, *args, **kwargs): """ **RegisterAt** RegisterAt(n, f, library_path, alias=None, original_name=None, doc=None, wrapped=None, explanation="", method_type=utils.identity, explain=True, _return_type=None) Most of the time you don't want to register an method as such, that is, you don't care about the `self` builder object, instead you want to register a function that transforms the value being piped down the DSL. For this you can use `RegisterAt` so e.g. 
def some_fun(obj, arg1, arg2): # code @MyBuilder.RegisterMethod("my_lib.") def some_fun_wrapper(self, arg1, arg2): return self.ThenAt(1, some_fun, arg1, arg2) can be written directly as @MyBuilder.RegisterAt(1, "my_lib.") def some_fun(obj, arg1, arg2): # code For this case you can just use `Register` which is a shortcut for `RegisterAt(1, ...)` @MyBuilder.Register("my_lib.") def some_fun(obj, arg1, arg2): # code **Also See** * `phi.builder.Builder.RegisterMethod` """ unpack_error = True try: n, f, library_path = args unpack_error = False cls._RegisterAt(n, f, library_path, **kwargs) except: if not unpack_error: raise def register_decorator(f): n, library_path = args cls._RegisterAt(n, f, library_path, **kwargs) return f return register_decorator @classmethod def Register0(cls, *args, **kwargs): """ `Register0(...)` is a shortcut for `RegisterAt(0, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(0, *args, **kwargs) @classmethod def Register(cls, *args, **kwargs): """ `Register(...)` is a shortcut for `RegisterAt(1, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(1, *args, **kwargs) @classmethod def Register2(cls, *args, **kwargs): """ `Register2(...)` is a shortcut for `RegisterAt(2, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(2, *args, **kwargs) @classmethod def Register3(cls, *args, **kwargs): """ `Register3(...)` is a shortcut for `RegisterAt(3, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(3, *args, **kwargs) @classmethod def Register4(cls, *args, **kwargs): """ `Register4(...)` is a shortcut for `RegisterAt(4, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(4, *args, **kwargs) @classmethod def 
Register5(cls, *args, **kwargs): """ `Register5(...)` is a shortcut for `RegisterAt(5, ...)` **Also See** * `phi.builder.Builder.RegisterAt` * `phi.builder.Builder.RegisterMethod` """ return cls.RegisterAt(5, *args, **kwargs) @classmethod
cgarciae/phi
phi/utils.py
get_method_sig
python
def get_method_sig(method):
    """Return a string that pretty much looks how the function signature
    would be written in python.

    :param method: a python method
    :return: A string describing the python method signature.
        eg: "my_method(first_arg, second_arg=42, third_arg='something')"
    """
    # inspect.getargspec was deprecated in Python 3 and removed in 3.11;
    # inspect.signature handles plain functions, bound methods, and
    # keyword-only parameters, and makes the _get_default_arg helper
    # (and its separate args/defaults bookkeeping) unnecessary.
    signature = inspect.signature(method)
    args = []
    for name, param in signature.parameters.items():
        if param.default is inspect.Parameter.empty:
            args.append(name)
        else:
            default = param.default
            # Quote string defaults so they read like source code,
            # matching the original double-quote formatting.
            if isinstance(default, str):
                default = '"%s"' % default
            args.append("%s=%s" % (name, default))
    return "%s(%s)" % (method.__name__, ", ".join(args))
Given a function, it returns a string that pretty much looks how the function signature would be written in python. :param method: a python method :return: A string describing the python method signature. eg: "my_method(first_arg, second_arg=42, third_arg='something')"
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/utils.py#L64-L90
[ "def _get_default_arg(args, defaults, arg_index):\n \"\"\" Method that determines if an argument has default value or not,\n and if yes what is the default value for the argument\n\n :param args: array of arguments, eg: ['first_arg', 'second_arg', 'third_arg']\n :param defaults: array of default values, eg: (42, 'something')\n :param arg_index: index of the argument in the argument array for which,\n this function checks if a default value exists or not. And if default value\n exists it would return the default value. Example argument: 1\n :return: Tuple of whether there is a default or not, and if yes the default\n value, eg: for index 2 i.e. for \"second_arg\" this function returns (True, 42)\n \"\"\"\n if not defaults:\n return DefaultArgSpec(False, None)\n\n args_with_no_defaults = len(args) - len(defaults)\n\n if arg_index < args_with_no_defaults:\n return DefaultArgSpec(False, None)\n else:\n value = defaults[arg_index - args_with_no_defaults]\n if (type(value) is str):\n value = '\"%s\"' % value\n return DefaultArgSpec(True, value)\n" ]
""" """ from __future__ import absolute_import from __future__ import division from __future__ import print_function from __future__ import unicode_literals from collections import namedtuple import inspect def identity(x): return x def state_identity(x, state): return x, state def compose2(f, g): return lambda x: f(g(x)) def forward_compose2(f, g): return lambda x: g(f(x)) def merge(dict_a, dict_b): return dict(dict_a, **dict_b) def lift(f): return lambda x, state: (f(x), state) class _NoValue(object): def __repr__(self): return "NoValue" NO_VALUE = _NoValue() DefaultArgSpec = namedtuple('DefaultArgSpec', 'has_default default_value') def _get_default_arg(args, defaults, arg_index): """ Method that determines if an argument has default value or not, and if yes what is the default value for the argument :param args: array of arguments, eg: ['first_arg', 'second_arg', 'third_arg'] :param defaults: array of default values, eg: (42, 'something') :param arg_index: index of the argument in the argument array for which, this function checks if a default value exists or not. And if default value exists it would return the default value. Example argument: 1 :return: Tuple of whether there is a default or not, and if yes the default value, eg: for index 2 i.e. 
for "second_arg" this function returns (True, 42) """ if not defaults: return DefaultArgSpec(False, None) args_with_no_defaults = len(args) - len(defaults) if arg_index < args_with_no_defaults: return DefaultArgSpec(False, None) else: value = defaults[arg_index - args_with_no_defaults] if (type(value) is str): value = '"%s"' % value return DefaultArgSpec(True, value) def get_instance_methods(instance): for method_name in dir(instance): method = getattr(instance, method_name) if hasattr(method, '__call__'): yield method_name, method def _flatten_list(container): for i in container: if isinstance(i, list): for j in flatten_list(i): yield j else: yield i def flatten_list(container): return list(_flatten_list(container)) def _flatten(container): for i in container: if hasattr(i, '__iter__'): for j in _flatten(i): yield j else: yield i def flatten(container): return list(_flatten(container))
cgarciae/phi
phi/dsl.py
Expression.Pipe
python
def Pipe(self, *sequence, **kwargs):
    """Compose *sequence* with ``Seq`` and immediately run it on ``None``.

    The optional ``refs`` keyword argument is extracted and passed as the
    initial reference state of the run; every other keyword argument is
    forwarded to ``Seq`` untouched.
    """
    refs = kwargs.pop("refs", {})
    composed = self.Seq(*sequence, **kwargs)
    return composed(None, **refs)
`Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. **Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html)
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L468-L522
[ " def Seq(self, *sequence, **kwargs):\n \"\"\"\n`Seq` is used to express function composition. The expression\n\n Seq(f, g)\n\nbe equivalent to\n\n lambda x: g(f(x))\n\nAs you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted.\n\nIn general, the following rules apply for Seq:\n\n**General Sequence**\n\n Seq(f0, f1, ..., fn-1, fn)\n\nis equivalent to\n\n lambda x: fn(fn-1(...(f1(f0(x)))))\n\n**Single Function**\n\n Seq(f)\n\nis equivalent to\n\n f\n\n**Identity**\n\nThe empty Seq\n\n Seq()\n\nis equivalent to\n\n lambda x: x\n\n### Examples\n\n from phi import P, Seq\n\n f = Seq(\n P * 2,\n P + 1,\n P ** 2\n )\n\n assert f(1) == 9 # ((1 * 2) + 1) ** 2\n\nThe previous example using `P.Pipe`\n\n from phi import P\n\n assert 9 == P.Pipe(\n 1,\n P * 2, #1 * 2 == 2\n P + 1, #2 + 1 == 3\n P ** 2 #3 ** 2 == 9\n )\n \"\"\"\n fs = [ _parse(elem)._f for elem in sequence ]\n\n def g(x, state):\n return functools.reduce(lambda args, f: f(*args), fs, (x, state))\n\n return self.__then__(g, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. 
Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. 
You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. 
In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. 
In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.ThenAt
python
def ThenAt(self, n, f, *_args, **kwargs): _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type)
`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt`
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L524-L638
[ "def lift(f):\n return lambda x, state: (f(x), state)\n", "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. 
""" args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. 
The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. 
In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Then0
python
def Then0(self, f, *args, **kwargs): return self.ThenAt(0, f, *args, **kwargs)
`Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L640-L644
[ " def ThenAt(self, n, f, *_args, **kwargs):\n \"\"\"\n`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function.\n\n**Arguments**\n\n* **n**: position at which the created partial will apply its awaited argument on the original function.\n* **f**: function which the partial will be created.\n* **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.\n* `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.\n\nYou can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression\n\n D == fun(A, B, C)\n\nall the following are equivalent\n\n from phi import P, Pipe, ThenAt\n\n D == Pipe(A, ThenAt(1, fun, B, C))\n D == Pipe(B, ThenAt(2, fun, A, C))\n D == Pipe(C, ThenAt(3, fun, A, B))\n\nyou could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable\n\n from phi import P, Pipe\n\n D == Pipe(A, P.Then(fun, B, C))\n D == Pipe(B, P.Then2(fun, A, C))\n D == Pipe(C, P.Then3(fun, A, B))\n\nThere is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it\n\n from phi import P\n\n D == Pipe(None, P.ThenAt(0, fun, A, B, C))\n D == Pipe(None, P.Then0(fun, A, B, C))\n\n**Examples**\n\nMax of 6 and the argument:\n\n from phi import P\n\n assert 6 == P.Pipe(\n 2,\n P.Then(max, 6)\n )\n\nPrevious is equivalent to\n\n assert 6 == max(2, 6)\n\nOpen a file in read mode (`'r'`)\n\n from phi import P\n\n f = P.Pipe(\n \"file.txt\",\n P.Then(open, 'r')\n )\n\nPrevious is equivalent to\n\n f = open(\"file.txt\", 'r')\n\nSplit a string by whitespace and then get the length of each word\n\n from phi import P\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n P.Then(str.split, ' ')\n .Then2(map, len)\n )\n\nPrevious is equivalent to\n\n x = \"Again hello world\"\n\n x = str.split(x, ' ')\n x = map(len, x)\n\n assert [5, 5, 5] == x\n\nAs you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object\n\n from phi import P, Obj\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n Obj.split(' '),\n P.map(len)\n )\n\n**Also see**\n\n* `phi.builder.Builder.Obj`\n* [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)\n* `phi.builder.Builder.RegisterAt`\n \"\"\"\n _return_type = None\n n_args = n - 1\n\n if '_return_type' in kwargs:\n _return_type = kwargs['_return_type']\n del kwargs['_return_type']\n\n @utils.lift\n def g(x):\n\n new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args\n return f(*new_args, **kwargs)\n\n return self.__then__(g, _return_type=_return_type)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. 
### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Then
python
def Then(self, f, *args, **kwargs): return self.ThenAt(1, f, *args, **kwargs)
`Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L646-L650
[ " def ThenAt(self, n, f, *_args, **kwargs):\n \"\"\"\n`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function.\n\n**Arguments**\n\n* **n**: position at which the created partial will apply its awaited argument on the original function.\n* **f**: function which the partial will be created.\n* **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.\n* `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.\n\nYou can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression\n\n D == fun(A, B, C)\n\nall the following are equivalent\n\n from phi import P, Pipe, ThenAt\n\n D == Pipe(A, ThenAt(1, fun, B, C))\n D == Pipe(B, ThenAt(2, fun, A, C))\n D == Pipe(C, ThenAt(3, fun, A, B))\n\nyou could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable\n\n from phi import P, Pipe\n\n D == Pipe(A, P.Then(fun, B, C))\n D == Pipe(B, P.Then2(fun, A, C))\n D == Pipe(C, P.Then3(fun, A, B))\n\nThere is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it\n\n from phi import P\n\n D == Pipe(None, P.ThenAt(0, fun, A, B, C))\n D == Pipe(None, P.Then0(fun, A, B, C))\n\n**Examples**\n\nMax of 6 and the argument:\n\n from phi import P\n\n assert 6 == P.Pipe(\n 2,\n P.Then(max, 6)\n )\n\nPrevious is equivalent to\n\n assert 6 == max(2, 6)\n\nOpen a file in read mode (`'r'`)\n\n from phi import P\n\n f = P.Pipe(\n \"file.txt\",\n P.Then(open, 'r')\n )\n\nPrevious is equivalent to\n\n f = open(\"file.txt\", 'r')\n\nSplit a string by whitespace and then get the length of each word\n\n from phi import P\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n P.Then(str.split, ' ')\n .Then2(map, len)\n )\n\nPrevious is equivalent to\n\n x = \"Again hello world\"\n\n x = str.split(x, ' ')\n x = map(len, x)\n\n assert [5, 5, 5] == x\n\nAs you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object\n\n from phi import P, Obj\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n Obj.split(' '),\n P.map(len)\n )\n\n**Also see**\n\n* `phi.builder.Builder.Obj`\n* [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)\n* `phi.builder.Builder.RegisterAt`\n \"\"\"\n _return_type = None\n n_args = n - 1\n\n if '_return_type' in kwargs:\n _return_type = kwargs['_return_type']\n del kwargs['_return_type']\n\n @utils.lift\n def g(x):\n\n new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args\n return f(*new_args, **kwargs)\n\n return self.__then__(g, _return_type=_return_type)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. 
### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Then2
python
def Then2(self, f, arg1, *args, **kwargs): args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs)
`Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L654-L659
[ " def ThenAt(self, n, f, *_args, **kwargs):\n \"\"\"\n`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function.\n\n**Arguments**\n\n* **n**: position at which the created partial will apply its awaited argument on the original function.\n* **f**: function which the partial will be created.\n* **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.\n* `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.\n\nYou can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression\n\n D == fun(A, B, C)\n\nall the following are equivalent\n\n from phi import P, Pipe, ThenAt\n\n D == Pipe(A, ThenAt(1, fun, B, C))\n D == Pipe(B, ThenAt(2, fun, A, C))\n D == Pipe(C, ThenAt(3, fun, A, B))\n\nyou could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable\n\n from phi import P, Pipe\n\n D == Pipe(A, P.Then(fun, B, C))\n D == Pipe(B, P.Then2(fun, A, C))\n D == Pipe(C, P.Then3(fun, A, B))\n\nThere is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it\n\n from phi import P\n\n D == Pipe(None, P.ThenAt(0, fun, A, B, C))\n D == Pipe(None, P.Then0(fun, A, B, C))\n\n**Examples**\n\nMax of 6 and the argument:\n\n from phi import P\n\n assert 6 == P.Pipe(\n 2,\n P.Then(max, 6)\n )\n\nPrevious is equivalent to\n\n assert 6 == max(2, 6)\n\nOpen a file in read mode (`'r'`)\n\n from phi import P\n\n f = P.Pipe(\n \"file.txt\",\n P.Then(open, 'r')\n )\n\nPrevious is equivalent to\n\n f = open(\"file.txt\", 'r')\n\nSplit a string by whitespace and then get the length of each word\n\n from phi import P\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n P.Then(str.split, ' ')\n .Then2(map, len)\n )\n\nPrevious is equivalent to\n\n x = \"Again hello world\"\n\n x = str.split(x, ' ')\n x = map(len, x)\n\n assert [5, 5, 5] == x\n\nAs you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object\n\n from phi import P, Obj\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n Obj.split(' '),\n P.map(len)\n )\n\n**Also see**\n\n* `phi.builder.Builder.Obj`\n* [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)\n* `phi.builder.Builder.RegisterAt`\n \"\"\"\n _return_type = None\n n_args = n - 1\n\n if '_return_type' in kwargs:\n _return_type = kwargs['_return_type']\n del kwargs['_return_type']\n\n @utils.lift\n def g(x):\n\n new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args\n return f(*new_args, **kwargs)\n\n return self.__then__(g, _return_type=_return_type)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. 
### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Then3
python
def Then3(self, f, arg1, arg2, *args, **kwargs): args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs)
`Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L661-L666
[ " def ThenAt(self, n, f, *_args, **kwargs):\n \"\"\"\n`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function.\n\n**Arguments**\n\n* **n**: position at which the created partial will apply its awaited argument on the original function.\n* **f**: function which the partial will be created.\n* **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.\n* `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.\n\nYou can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression\n\n D == fun(A, B, C)\n\nall the following are equivalent\n\n from phi import P, Pipe, ThenAt\n\n D == Pipe(A, ThenAt(1, fun, B, C))\n D == Pipe(B, ThenAt(2, fun, A, C))\n D == Pipe(C, ThenAt(3, fun, A, B))\n\nyou could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable\n\n from phi import P, Pipe\n\n D == Pipe(A, P.Then(fun, B, C))\n D == Pipe(B, P.Then2(fun, A, C))\n D == Pipe(C, P.Then3(fun, A, B))\n\nThere is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it\n\n from phi import P\n\n D == Pipe(None, P.ThenAt(0, fun, A, B, C))\n D == Pipe(None, P.Then0(fun, A, B, C))\n\n**Examples**\n\nMax of 6 and the argument:\n\n from phi import P\n\n assert 6 == P.Pipe(\n 2,\n P.Then(max, 6)\n )\n\nPrevious is equivalent to\n\n assert 6 == max(2, 6)\n\nOpen a file in read mode (`'r'`)\n\n from phi import P\n\n f = P.Pipe(\n \"file.txt\",\n P.Then(open, 'r')\n )\n\nPrevious is equivalent to\n\n f = open(\"file.txt\", 'r')\n\nSplit a string by whitespace and then get the length of each word\n\n from phi import P\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n P.Then(str.split, ' ')\n .Then2(map, len)\n )\n\nPrevious is equivalent to\n\n x = \"Again hello world\"\n\n x = str.split(x, ' ')\n x = map(len, x)\n\n assert [5, 5, 5] == x\n\nAs you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object\n\n from phi import P, Obj\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n Obj.split(' '),\n P.map(len)\n )\n\n**Also see**\n\n* `phi.builder.Builder.Obj`\n* [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)\n* `phi.builder.Builder.RegisterAt`\n \"\"\"\n _return_type = None\n n_args = n - 1\n\n if '_return_type' in kwargs:\n _return_type = kwargs['_return_type']\n del kwargs['_return_type']\n\n @utils.lift\n def g(x):\n\n new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args\n return f(*new_args, **kwargs)\n\n return self.__then__(g, _return_type=_return_type)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. 
### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Then4
python
def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs)
`Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L668-L673
[ " def ThenAt(self, n, f, *_args, **kwargs):\n \"\"\"\n`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function.\n\n**Arguments**\n\n* **n**: position at which the created partial will apply its awaited argument on the original function.\n* **f**: function which the partial will be created.\n* **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.\n* `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.\n\nYou can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression\n\n D == fun(A, B, C)\n\nall the following are equivalent\n\n from phi import P, Pipe, ThenAt\n\n D == Pipe(A, ThenAt(1, fun, B, C))\n D == Pipe(B, ThenAt(2, fun, A, C))\n D == Pipe(C, ThenAt(3, fun, A, B))\n\nyou could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable\n\n from phi import P, Pipe\n\n D == Pipe(A, P.Then(fun, B, C))\n D == Pipe(B, P.Then2(fun, A, C))\n D == Pipe(C, P.Then3(fun, A, B))\n\nThere is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it\n\n from phi import P\n\n D == Pipe(None, P.ThenAt(0, fun, A, B, C))\n D == Pipe(None, P.Then0(fun, A, B, C))\n\n**Examples**\n\nMax of 6 and the argument:\n\n from phi import P\n\n assert 6 == P.Pipe(\n 2,\n P.Then(max, 6)\n )\n\nPrevious is equivalent to\n\n assert 6 == max(2, 6)\n\nOpen a file in read mode (`'r'`)\n\n from phi import P\n\n f = P.Pipe(\n \"file.txt\",\n P.Then(open, 'r')\n )\n\nPrevious is equivalent to\n\n f = open(\"file.txt\", 'r')\n\nSplit a string by whitespace and then get the length of each word\n\n from phi import P\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n P.Then(str.split, ' ')\n .Then2(map, len)\n )\n\nPrevious is equivalent to\n\n x = \"Again hello world\"\n\n x = str.split(x, ' ')\n x = map(len, x)\n\n assert [5, 5, 5] == x\n\nAs you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object\n\n from phi import P, Obj\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n Obj.split(' '),\n P.map(len)\n )\n\n**Also see**\n\n* `phi.builder.Builder.Obj`\n* [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)\n* `phi.builder.Builder.RegisterAt`\n \"\"\"\n _return_type = None\n n_args = n - 1\n\n if '_return_type' in kwargs:\n _return_type = kwargs['_return_type']\n del kwargs['_return_type']\n\n @utils.lift\n def g(x):\n\n new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args\n return f(*new_args, **kwargs)\n\n return self.__then__(g, _return_type=_return_type)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. 
### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Then5
python
def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs)
`Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L675-L680
[ " def ThenAt(self, n, f, *_args, **kwargs):\n \"\"\"\n`ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function.\n\n**Arguments**\n\n* **n**: position at which the created partial will apply its awaited argument on the original function.\n* **f**: function which the partial will be created.\n* **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.\n* `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.\n\nYou can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression\n\n D == fun(A, B, C)\n\nall the following are equivalent\n\n from phi import P, Pipe, ThenAt\n\n D == Pipe(A, ThenAt(1, fun, B, C))\n D == Pipe(B, ThenAt(2, fun, A, C))\n D == Pipe(C, ThenAt(3, fun, A, B))\n\nyou could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable\n\n from phi import P, Pipe\n\n D == Pipe(A, P.Then(fun, B, C))\n D == Pipe(B, P.Then2(fun, A, C))\n D == Pipe(C, P.Then3(fun, A, B))\n\nThere is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it\n\n from phi import P\n\n D == Pipe(None, P.ThenAt(0, fun, A, B, C))\n D == Pipe(None, P.Then0(fun, A, B, C))\n\n**Examples**\n\nMax of 6 and the argument:\n\n from phi import P\n\n assert 6 == P.Pipe(\n 2,\n P.Then(max, 6)\n )\n\nPrevious is equivalent to\n\n assert 6 == max(2, 6)\n\nOpen a file in read mode (`'r'`)\n\n from phi import P\n\n f = P.Pipe(\n \"file.txt\",\n P.Then(open, 'r')\n )\n\nPrevious is equivalent to\n\n f = open(\"file.txt\", 'r')\n\nSplit a string by whitespace and then get the length of each word\n\n from phi import P\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n P.Then(str.split, ' ')\n .Then2(map, len)\n )\n\nPrevious is equivalent to\n\n x = \"Again hello world\"\n\n x = str.split(x, ' ')\n x = map(len, x)\n\n assert [5, 5, 5] == x\n\nAs you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object\n\n from phi import P, Obj\n\n assert [5, 5, 5] == P.Pipe(\n \"Again hello world\",\n Obj.split(' '),\n P.map(len)\n )\n\n**Also see**\n\n* `phi.builder.Builder.Obj`\n* [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)\n* `phi.builder.Builder.RegisterAt`\n \"\"\"\n _return_type = None\n n_args = n - 1\n\n if '_return_type' in kwargs:\n _return_type = kwargs['_return_type']\n del kwargs['_return_type']\n\n @utils.lift\n def g(x):\n\n new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args\n return f(*new_args, **kwargs)\n\n return self.__then__(g, _return_type=_return_type)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. 
### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.List
python
def List(self, *branches, **kwargs): gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs)
While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. 
For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html).
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L682-L793
[ "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. 
In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Seq
python
def Seq(self, *sequence, **kwargs): fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs)
`Seq` is used to express function composition. The expression Seq(f, g) is equivalent to lambda x: g(f(x)) As you see, it's a little different from the mathematical definition. Execution order flows from left to right; this makes reading and reasoning about code much easier. This behaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled and executed. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 )
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L801-L869
[ "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. 
We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). 
""" gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.With
python
def With(self, context_manager, *body, **kwargs): context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs)
**With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. ### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read()
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L930-L973
[ "def _parse(code):\n\n #if type(code) is tuple:\n if isinstance(code, Expression):\n return code\n elif hasattr(code, '__call__') or isclass(code):\n return Expression(utils.lift(code))\n elif isinstance(code, list):\n return E.List(*code)\n elif isinstance(code, tuple):\n return E.Tuple(*code)\n elif isinstance(code, set):\n return E.Set(*code)\n elif isinstance(code, dict):\n return E.Dict(**code)\n else:\n return E.Val(code)\n", "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n", " def Seq(self, *sequence, **kwargs):\n \"\"\"\n`Seq` is used to express function composition. The expression\n\n Seq(f, g)\n\nbe equivalent to\n\n lambda x: g(f(x))\n\nAs you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted.\n\nIn general, the following rules apply for Seq:\n\n**General Sequence**\n\n Seq(f0, f1, ..., fn-1, fn)\n\nis equivalent to\n\n lambda x: fn(fn-1(...(f1(f0(x)))))\n\n**Single Function**\n\n Seq(f)\n\nis equivalent to\n\n f\n\n**Identity**\n\nThe empty Seq\n\n Seq()\n\nis equivalent to\n\n lambda x: x\n\n### Examples\n\n from phi import P, Seq\n\n f = Seq(\n P * 2,\n P + 1,\n P ** 2\n )\n\n assert f(1) == 9 # ((1 * 2) + 1) ** 2\n\nThe previous example using `P.Pipe`\n\n from phi import P\n\n assert 9 == P.Pipe(\n 1,\n P * 2, #1 * 2 == 2\n P + 1, #2 + 1 == 3\n P ** 2 #3 ** 2 == 9\n )\n \"\"\"\n fs = [ _parse(elem)._f for elem in sequence ]\n\n def g(x, state):\n return functools.reduce(lambda args, f: f(*args), fs, (x, state))\n\n return self.__then__(g, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. 
We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. 
Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.ReadList
python
def ReadList(self, *branches, **kwargs): branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs)
Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L1021-L1027
[ " def List(self, *branches, **kwargs):\n \"\"\"\nWhile `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion.\n\nThe expression\n\n k = List(f, g)\n\nis equivalent to\n\n k = lambda x: [ f(x), g(x) ]\n\n\nIn general, the following rules apply after compilation:\n\n**General Branching**\n\n List(f0, f1, ..., fn)\n\nis equivalent to\n\n lambda x: [ f0(x), f1(x), ..., fn(x) ]\n\n\n**Composing & Branching**\n\nIt is interesting to see how braching interacts with composing. The expression\n\n Seq(f, List(g, h))\n\nis *almost* equivalent to\n\n List( Seq(f, g), Seq(f, h) )\n\nAs you see its as if `f` where distributed over the List. We say *almost* because their implementation is different\n\n def _lambda(x):\n x = f(x)\n return [ g(x), h(x) ]\n\nvs\n\n lambda x: [ g(f(x)), h(f(x)) ]\n\nAs you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function.\n\n### Examples\n\n form phi import P, List\n\n avg_word_length = P.Pipe(\n \"1 22 333\",\n lambda s: s.split(' '), # ['1', '22', '333']\n lambda l: map(len, l), # [1, 2, 3]\n List(\n sum # 1 + 2 + 3 == 6\n ,\n len # len([1, 2, 3]) == 3\n ),\n lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2\n )\n\n assert avg_word_length == 2\n\nThe previous could also be done more briefly like this\n\n form phi import P, Obj, List\n\n avg_word_length = P.Pipe(\n \"1 22 333\", Obj\n .split(' ') # ['1', '22', '333']\n .map(len) # [1, 2, 3]\n .List(\n sum #sum([1, 2, 3]) == 6\n ,\n len #len([1, 2, 3]) == 3\n ),\n P[0] / P[1] #6 / 3 == 2\n )\n\n assert avg_word_length == 2\n\nIn the example above the last expression\n\n P[0] / P[1]\n\nworks for a couple of reasons\n\n1. The previous expression returns a list\n2. 
In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]`\n3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression\n\n (P * 2) / (P + 1)\n\ncompile to a function of the form\n\n lambda x: (x * 2) / (x + 1)\n\nCheck out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html).\n\n \"\"\"\n gs = [ _parse(code)._f for code in branches ]\n\n def h(x, state):\n ys = []\n for g in gs:\n y, state = g(x, state)\n ys.append(y)\n\n return (ys, state)\n\n return self.__then__(h, **kwargs)\n" ]
class Expression(object):
    """
    All elements of this language are callables (implement `__call__`) of arity 1.

    Internally every expression wraps a state-threading function of the form

        f(x, state) -> (y, state)

    so that a reference state dict (used by `Read`/`Write`) flows through the
    whole computation while the value `x` is piped through it.

    ** Examples **

    Compiling a function just returns back the function

        Seq(f) == f

    and piping through a function is just the same as applying the function

        Pipe(x, f) == f(x)
    """

    def __init__(self, f=utils.state_identity):
        # `f` maps (x, state) -> (y, state); the default is the identity on
        # both the value and the state.
        self._f = f

    def __unit__(self, f, _return_type=None):
        "Monadic unit, also known as `return`: wraps a raw state function in an Expression."
        if _return_type:
            return _return_type(f)
        else:
            return self.__class__(f)

    def __then__(self, other, **kwargs):
        # Monadic bind: compose this expression's state function with `other`,
        # feeding the (value, state) pair produced by `self` into `other`.
        f = self._f
        g = other

        h = lambda x, state: g(*f(x, state))

        return self.__unit__(h, **kwargs)

    def __call__(self, __x__, *__return_state__, **state):
        """
        Run the expression on the value `__x__`.

        A single optional boolean positional argument requests that the final
        reference state be returned alongside the value as `(value, state)`.
        Keyword arguments seed the initial reference state.
        """
        x = __x__
        return_state = __return_state__

        if len(return_state) == 1 and type(return_state[0]) is not bool:
            raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state))

        with _StateContextManager(state):
            y, next_state = self._f(x, state)

        return (y, next_state) if len(return_state) >= 1 and return_state[0] else y

    def __hash__(self):
        # Hash by the wrapped function. NOTE(review): `__eq__` below is
        # overloaded to *build* comparison expressions, so hashing is not
        # consistent with `==`; this exists so Expressions stay usable as
        # dict/set keys.
        return hash(self._f)

    def F(self, expr):
        # Alias for composition: `a.F(b)` is the same as `a >> b`.
        return self >> expr

    def Pipe(self, *sequence, **kwargs):
        """
        `Pipe` runs any `phi.dsl.Expression`. It's highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator.

        **Arguments**

        * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`.
        * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info.

        The expression

            Pipe(*sequence, **kwargs)

        is equivalent to

            Seq(*sequence, **kwargs)(None)

        Normally the first argument of `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therefore, the input `None` is discarded.

        **Examples**

            from phi import P

            def add1(x): return x + 1
            def mul3(x): return x * 3

            x = P.Pipe(
                1,     #input
                add1,  #1 + 1 == 2
                mul3   #2 * 3 == 6
            )

            assert x == 6

        The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions

            from phi import P

            x = P.Pipe(
                1,      #input
                P + 1,  #1 + 1 == 2
                P * 3   #2 * 3 == 6
            )

            assert x == 6

        **Also see**

        * `phi.builder.Builder.Seq`
        * [dsl](https://cgarciae.github.io/phi/dsl.m.html)
        * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile)
        * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html)
        """
        # "refs" seeds the initial reference state; everything else goes to Seq.
        state = kwargs.pop("refs", {})
        return self.Seq(*sequence, **kwargs)(None, **state)

    def ThenAt(self, n, f, *_args, **kwargs):
        """
        `ThenAt` enables you to partially apply many arguments to a function, the returned partial expects a single argument which will be applied at the `n`th position of the original function.

        **Arguments**

        * **n**: position at which the created partial will apply its awaited argument on the original function.
        * **f**: function for which the partial will be created.
        * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`.
        * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`.

        You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression

            D == fun(A, B, C)

        all the following are equivalent

            from phi import P, Pipe, ThenAt

            D == Pipe(A, ThenAt(1, fun, B, C))
            D == Pipe(B, ThenAt(2, fun, A, C))
            D == Pipe(C, ThenAt(3, fun, A, B))

        you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable

            from phi import P, Pipe

            D == Pipe(A, P.Then(fun, B, C))
            D == Pipe(B, P.Then2(fun, A, C))
            D == Pipe(C, P.Then3(fun, A, B))

        There is a special case not discussed above: `n = 0`. When this happens only the arguments given will be applied to `f`, this method will return a partial that expects a single argument but completely ignores it

            from phi import P

            D == Pipe(None, P.ThenAt(0, fun, A, B, C))
            D == Pipe(None, P.Then0(fun, A, B, C))

        **Examples**

        Max of 6 and the argument:

            from phi import P

            assert 6 == P.Pipe(
                2,
                P.Then(max, 6)
            )

        Previous is equivalent to

            assert 6 == max(2, 6)

        Open a file in read mode (`'r'`)

            from phi import P

            f = P.Pipe(
                "file.txt",
                P.Then(open, 'r')
            )

        Previous is equivalent to

            f = open("file.txt", 'r')

        Split a string by whitespace and then get the length of each word

            from phi import P

            assert [5, 5, 5] == P.Pipe(
                "Again hello world",
                P.Then(str.split, ' ')
                .Then2(map, len)
            )

        Previous is equivalent to

            x = "Again hello world"

            x = str.split(x, ' ')
            x = map(len, x)

            assert [5, 5, 5] == x

        As you see, `Then2` was very useful because `map` accepts an `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object

            from phi import P, Obj

            assert [5, 5, 5] == P.Pipe(
                "Again hello world",
                Obj.split(' '),
                P.map(len)
            )

        **Also see**

        * `phi.builder.Builder.Obj`
        * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html)
        * `phi.builder.Builder.RegisterAt`
        """
        _return_type = None
        n_args = n - 1

        if '_return_type' in kwargs:
            _return_type = kwargs['_return_type']
            del kwargs['_return_type']

        @utils.lift
        def g(x):
            # Splice the piped value `x` into position n of the argument list;
            # n == 0 (n_args < 0) ignores `x` entirely.
            new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args
            return f(*new_args, **kwargs)

        return self.__then__(g, _return_type=_return_type)

    def Then0(self, f, *args, **kwargs):
        """
        `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
        """
        return self.ThenAt(0, f, *args, **kwargs)

    def Then(self, f, *args, **kwargs):
        """
        `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
        """
        return self.ThenAt(1, f, *args, **kwargs)

    Then1 = Then

    def Then2(self, f, arg1, *args, **kwargs):
        """
        `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
        """
        args = (arg1,) + args
        return self.ThenAt(2, f, *args, **kwargs)

    def Then3(self, f, arg1, arg2, *args, **kwargs):
        """
        `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
        """
        args = (arg1, arg2) + args
        return self.ThenAt(3, f, *args, **kwargs)

    def Then4(self, f, arg1, arg2, arg3, *args, **kwargs):
        """
        `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
        """
        args = (arg1, arg2, arg3) + args
        return self.ThenAt(4, f, *args, **kwargs)

    def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs):
        """
        `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information.
        """
        args = (arg1, arg2, arg3, arg4) + args
        return self.ThenAt(5, f, *args, **kwargs)

    def List(self, *branches, **kwargs):
        """
        While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most common incarnation of this expression, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expression.

        The expression

            k = List(f, g)

        is equivalent to

            k = lambda x: [ f(x), g(x) ]

        In general, the following rules apply after compilation:

        **General Branching**

            List(f0, f1, ..., fn)

        is equivalent to

            lambda x: [ f0(x), f1(x), ..., fn(x) ]

        **Composing & Branching**

        It is interesting to see how branching interacts with composing. The expression

            Seq(f, List(g, h))

        is *almost* equivalent to

            List( Seq(f, g), Seq(f, h) )

        As you see it's as if `f` were distributed over the List. We say *almost* because their implementation is different

            def _lambda(x):
                x = f(x)
                return [ g(x), h(x) ]

        vs

            lambda x: [ g(f(x)), h(f(x)) ]

        As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function.

        ### Examples

            from phi import P, List

            avg_word_length = P.Pipe(
                "1 22 333",
                lambda s: s.split(' '), # ['1', '22', '333']
                lambda l: map(len, l), # [1, 2, 3]
                List(
                    sum # 1 + 2 + 3 == 6
                ,
                    len # len([1, 2, 3]) == 3
                ),
                lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2
            )

            assert avg_word_length == 2

        The previous could also be done more briefly like this

            from phi import P, Obj, List

            avg_word_length = P.Pipe(
                "1 22 333", Obj
                .split(' ') # ['1', '22', '333']
                .map(len) # [1, 2, 3]
                .List(
                    sum #sum([1, 2, 3]) == 6
                ,
                    len #len([1, 2, 3]) == 3
                ),
                P[0] / P[1] #6 / 3 == 2
            )

            assert avg_word_length == 2

        In the example above the last expression

            P[0] / P[1]

        works for a couple of reasons

        1. The previous expression returns a list
        2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]`
        3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression

            (P * 2) / (P + 1)

        compiles to a function of the form

            lambda x: (x * 2) / (x + 1)

        Check out the documentation for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html).
        """
        gs = [ _parse(code)._f for code in branches ]

        def h(x, state):
            # Run every branch on the same input value, threading the state
            # through the branches sequentially.
            ys = []
            for g in gs:
                y, state = g(x, state)
                ys.append(y)

            return (ys, state)

        return self.__then__(h, **kwargs)

    def Tuple(self, *expressions, **kwargs):
        # Like `List` but the branched results are collected into a tuple.
        return self.List(*expressions) >> tuple

    def Set(self, *expressions, **kwargs):
        # Like `List` but the branched results are collected into a set.
        return self.List(*expressions) >> set

    def Seq(self, *sequence, **kwargs):
        """
        `Seq` is used to express function composition. The expression

            Seq(f, g)

        is equivalent to

            lambda x: g(f(x))

        As you see, it's a little different from the mathematical definition. Execution order flows from left to right, this makes reading and reasoning about code way more easy. This behaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled and executed.

        In general, the following rules apply for Seq:

        **General Sequence**

            Seq(f0, f1, ..., fn-1, fn)

        is equivalent to

            lambda x: fn(fn-1(...(f1(f0(x)))))

        **Single Function**

            Seq(f)

        is equivalent to

            f

        **Identity**

        The empty Seq

            Seq()

        is equivalent to

            lambda x: x

        ### Examples

            from phi import P, Seq

            f = Seq(
                P * 2,
                P + 1,
                P ** 2
            )

            assert f(1) == 9 # ((1 * 2) + 1) ** 2

        The previous example using `P.Pipe`

            from phi import P

            assert 9 == P.Pipe(
                1,
                P * 2,  #1 * 2 == 2
                P + 1,  #2 + 1 == 3
                P ** 2  #3 ** 2 == 9
            )
        """
        fs = [ _parse(elem)._f for elem in sequence ]

        def g(x, state):
            # Left-to-right fold: each state function consumes the previous
            # (value, state) pair.
            return functools.reduce(lambda args, f: f(*args), fs, (x, state))

        return self.__then__(g, **kwargs)

    def Dict(self, **branches):
        # Named branching: like `List` but each branch gets a key; the result
        # is a dict-like `_RecordObject` whose values can also be read as
        # attributes (see `Rec`).
        gs = { key : _parse(value)._f for key, value in branches.items() }

        def h(x, state):
            ys = {}
            for key, g in gs.items():
                y, state = g(x, state)
                ys[key] = y

            return _RecordObject(**ys), state

        return self.__then__(h)

    def With(self, context_manager, *body, **kwargs):
        """
        **With**

            def With(context_manager, *body):

        **Arguments**

        * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager.
        * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expressions contained are composed.

        As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statement, in Phi you use `P.With` or `phi.With`

        **Context**

        Python's `with` statement returns a context object through the `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function.

        ### Examples

            from phi import P, Obj, Context, With, Pipe

            text = Pipe(
                "text.txt",
                With( open, Context,
                    Obj.read()
                )
            )

        The previous is equivalent to

            with open("text.txt") as f:
                text = f.read()
        """
        context_f = _parse(context_manager)._f
        body_f = E.Seq(*body)._f

        def g(x, state):
            # Evaluate the context-manager expression, enter it, expose the
            # scope via `Context()`, and run the body inside.
            context, state = context_f(x, state)
            with context as scope:
                with _WithContextManager(scope):
                    return body_f(x, state)

        return self.__then__(g, **kwargs)

    @property
    def Read(self):
        """
        Giving names and saving parts of your computation to use later is useful to say the least. In Phi the expression

            Write(x = expr)

        creates a reference `x` given the value of `expr` which you can call later. To read the previous you would use any of the following expressions

            Read('x')

            Read.x

        ### Example

        Lets see a common situation where you would use this

            from phi import P, List, Seq, Read, Write

            result = P.Pipe(
                input,
                Write(ref = f1),
                f2,
                List(
                    f3
                ,
                    Seq(
                        Read('ref'),
                        f4
                    )
                )
            )

        Here you *save* the value outputted by `f1` and then load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to

            x = f1(input)
            ref = x
            x = f2(x)

            result = [
                f3(x)
            ,
                f4(ref)
            ]
        """
        return _ReadProxy(self)

    def Write(self, *state_args, **state_dict):
        """See `phi.dsl.Expression.Read`"""

        if len(state_dict) + len(state_args) < 1:
            raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict))

        if len(state_dict) > 1:
            raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict))

        if len(state_dict) > 0:
            # `Write(key = expr)`: run `expr` first, then store its result
            # under `key` (plus any extra positional names).
            state_key = next(iter(state_dict.keys()))
            write_expr = state_dict[state_key]

            state_args += (state_key,)

            expr = self >> write_expr
        else:
            expr = self

        def g(x, state):
            # Store the current value under every requested reference name.
            update = { key: x for key in state_args }
            state = utils.merge(state, update)

            #side effect for convenience
            _StateContextManager.REFS.update(state)

            return x, state

        return expr.__then__(g)

    @property
    def Rec(self):
        """
        `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulate a field access. The following expression

            Rec.some_field

        is equivalent to

            lambda rec: rec.some_field

        `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch is then done by index, which reduces readability. Named (`Rec`) branches return a special object that derives from `dict` and fully emulates it, but also implements `__getattr__`, so you can access a value as if it were a field.

        **Examples**

            from phi import P, Rec

            stats = P.Pipe(
                [1,2,3],
                Rec(
                    sum = sum
                ,
                    len = len
                )
            )

            assert stats.sum == 6
            assert stats.len == 3

            assert stats['sum'] == 6
            assert stats['len'] == 3

        Now lets imagine that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas

            from phi import P, Rec

            avg = P.Pipe(
                [1,2,3],
                Rec(
                    sum = sum   #6
                ,
                    len = len   #3
                ),
                Rec.sum / Rec.len   #6 / 3 == 2
            )

            assert avg == 2

        Field access also works on arbitrary objects

            from phi import P, Obj, Rec

            class Point(object):
                def __init__(self, x, y):
                    self.x = x
                    self.y = y

                def flip_cords(self):
                    y = self.y
                    self.y = self.x
                    self.x = y

            assert 4 == P.Pipe(
                Point(1, 2),         # point(x=1, y=2)
                Obj.flip_cords(),    # point(x=2, y=1)
                Rec.x,               # point.x = 2
                P * 2                # 2 * 2 = 4
            )

        **Also see**

        * `phi.builder.Builder.Obj`
        * `phi.builder.Builder.Read`
        * `phi.builder.Builder.Write`
        """
        return _RecordProxy(self)

    @property
    def Obj(self):
        """
        `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression

            Obj.some_method(x1, x2, ...)

        is equivalent to

            lambda obj: obj.some_method(x1, x2, ...)

        **Examples**

            from phi import P, Obj

            assert "hello world" == P.Pipe(
                "  HELLO HELLO {0}  ",
                Obj.format("WORLD"),  # "  HELLO HELLO WORLD  "
                Obj.strip(),          # "HELLO HELLO WORLD"
                Obj.lower(),          # "hello hello world"
                Obj.split(' '),       # ["hello", "hello", "world"]
                Obj.count("hello")    # 2
            )

        **Also see**

        * `phi.builder.Builder.Rec`
        * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write)
        * `phi.builder.Builder.Write`
        """
        return _ObjectProxy(self)

    @property
    def Ref(self):
        """
        Returns an object that helps you to immediately create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref).

        **Creating References**

        You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass it as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example

            from phi import P

            r = P.Ref('r')

            assert [600, 3, 6] == P.Pipe(
                2,
                P + 1, {'a'},  # a = 2 + 1 = 3
                P * 2, {'b'},  # b = 3 * 2 = 6
                P * 100, {'c', r },  # c = r = 6 * 100 = 600
                ['c', 'a', 'b']
            )

            assert r() == 600

        **Reading References from the Current Context**

        While the expression `Read.x` will return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value immediately, this is useful when using it inside python lambdas.

            Read.x(None) <=> Ref.x

        As an example

            from phi import P, Obj, Ref

            assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe(
                "a b c", Obj
                .split(' ').Write.keys  # keys = ['a', 'b', 'c']
                .map(ord),              # [ord('a'), ord('b'), ord('c')] == [97, 98, 99]
                lambda it: zip(Ref.keys, it),  # [('a', 97), ('b', 98), ('c', 99)]
                dict                    # {'a': 97, 'b': 98, 'c': 99}
            )
        """
        return _RefProxyInstance

    def Val(self, val, **kwargs):
        """
        The expression

            Val(a)

        is equivalent to the constant function

            lambda x: a

        All expressions in this module interpret values that are not functions as constant functions using `Val`, for example

            Seq(1, P + 1)

        is equivalent to

            Seq(Val(1), P + 1)

        The previous expression as a whole is a constant function since it will return `2` no matter what input you give it.
        """
        f = utils.lift(lambda z: val)
        return self.__then__(f, **kwargs)

    def If(self, condition, *then, **kwargs):
        """
        **If**

            If(Predicate, *Then)

        Having conditional expressions is a necessity in every language, Phi includes the `If` expression for such a purpose.

        **Arguments**

        * **Predicate** : a predicate expression used to determine if the `Then` or `Else` branches should be used.
        * ***Then** : an expression to be executed if the `Predicate` yields `True`, since this parameter is variadic you can stack expressions and they will be interpreted as a tuple `phi.dsl.Seq`.

        This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply

        * If no branch is entered the whole expression behaves like the identity
        * `Elif` can only be used after an `If` or another `Elif` expression
        * Many `Elif` expressions can be stacked sequentially
        * `Else` can only be used after an `If` or `Elif` expression

        ** Examples **

            from phi import P, If

            assert "Between 2 and 10" == P.Pipe(
                5,
                If(P > 10,
                    "Greater than 10"
                ).Elif(P < 2,
                    "Less than 2"
                ).Else(
                    "Between 2 and 10"
                )
            )
        """
        cond_f = _parse(condition)._f
        then_f = E.Seq(*then)._f
        else_f = utils.state_identity  # default: identity until Elif/Else adds a branch

        ast = (cond_f, then_f, else_f)

        g = _compile_if(ast)

        # Keep the conditional AST and the root expression on the result so
        # that a following `.Elif(...)` / `.Else(...)` can extend/recompile it.
        expr = self.__then__(g, **kwargs)
        expr._ast = ast
        expr._root = self

        return expr

    def Else(self, *Else, **kwargs):
        """See `phi.dsl.Expression.If`"""
        root = self._root
        ast = self._ast

        next_else = E.Seq(*Else)._f
        ast = _add_else(ast, next_else)

        g = _compile_if(ast)

        # Recompile the whole conditional from the original root.
        return root.__then__(g, **kwargs)

    def Elif(self, condition, *then, **kwargs):
        """See `phi.dsl.Expression.If`"""
        root = self._root
        ast = self._ast

        cond_f = _parse(condition)._f
        then_f = E.Seq(*then)._f
        else_f = utils.state_identity

        next_else = (cond_f, then_f, else_f)
        ast = _add_else(ast, next_else)

        g = _compile_if(ast)

        expr = root.__then__(g, **kwargs)
        expr._ast = ast
        expr._root = root

        return expr

    @staticmethod
    def Context(*args):
        """
        **Builder Core**. Also available as a global function as `phi.Context`.

        Returns the context object of the current `dsl.With` statement.

        **Arguments**

        * ***args**: By design `Context` accepts any number of arguments and completely ignores them.

        This is a staticmethod and it doesn't return a `Builder`/`Expression` by design so it can be called directly:

            from phi import P, Context, Obj

            def read_file(z):
                f = Context()
                return f.read()

            lines = P.Pipe(
                "text.txt",
                P.With( open,
                    read_file,
                    Obj.split("\\n")
                )
            )

        Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as:

            from phi import P, Context, Obj

            lines = P.Pipe(
                "text.txt",
                P.With( open,
                    Context,  # f
                    Obj.read(),
                    Obj.split("\\n")
                )
            )

        `Context` yields an exception when used outside of a `With` block.

        **Also see**

        * `phi.builder.Builder.Obj`
        * [dsl](https://cgarciae.github.io/phi/dsl.m.html)
        """
        if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE:
            raise Exception("Cannot use 'Context' outside of a 'With' block")

        return _WithContextManager.WITH_GLOBAL_CONTEXT

    ###############
    ## Operators
    ###############

    def __rshift__(self, other):
        # `a >> b`: compose, piping a's output into b.
        f = _parse(other)._f
        return self.__then__(f)

    def __rrshift__(self, prev):
        # `value >> expr` where `value` is not an Expression: parse the left
        # side (constants become `Val`) and compose.
        prev = _parse(prev)
        return prev.__then__(self._f)

    # `<<` composes in the opposite direction.
    __rlshift__ = __rshift__
    __lshift__ = __rrshift__

    def __getitem__(self, key):
        # `P[key]` compiles to `lambda x: x[key]`.
        f = utils.lift(lambda x: x[key])
        return self.__then__(f)

    # Binary operators build new expressions from the piped value, e.g.
    # `(P + 1)` compiles to `lambda x: x + 1`.
    __add__      = _fmap(operator.add)
    __mul__      = _fmap(operator.mul)
    __sub__      = _fmap(operator.sub)
    __mod__      = _fmap(operator.mod)
    __pow__      = _fmap(operator.pow)

    __and__      = _fmap(operator.and_)
    __or__       = _fmap(operator.or_)
    __xor__      = _fmap(operator.xor)

    __div__      = _fmap(operator.truediv)
    __divmod__   = _fmap(divmod)
    __floordiv__ = _fmap(operator.floordiv)
    __truediv__  = _fmap(operator.truediv)

    __contains__ = _fmap(operator.contains)

    __lt__ = _fmap(operator.lt)
    __le__ = _fmap(operator.le)
    __gt__ = _fmap(operator.gt)
    __ge__ = _fmap(operator.ge)
    __eq__ = _fmap(operator.eq)
    __ne__ = _fmap(operator.ne)

    # Unary operators.
    __neg__    = _unary_fmap(operator.neg)
    __pos__    = _unary_fmap(operator.pos)
    __invert__ = _unary_fmap(operator.invert)

    # Reflected variants flip the operand order, e.g. `(1 - P)` compiles to
    # `lambda x: 1 - x`.
    __radd__      = _fmap_flip(operator.add)
    __rmul__      = _fmap_flip(operator.mul)
    __rsub__      = _fmap_flip(operator.sub)
    __rmod__      = _fmap_flip(operator.mod)
    __rpow__      = _fmap_flip(operator.pow)
    __rdiv__      = _fmap_flip(operator.truediv)
    __rdivmod__   = _fmap_flip(divmod)
    __rtruediv__  = _fmap_flip(operator.truediv)
    __rfloordiv__ = _fmap_flip(operator.floordiv)

    __rand__ = _fmap_flip(operator.and_)
    __ror__  = _fmap_flip(operator.or_)
    __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Write
python
def Write(self, *state_args, **state_dict): if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g)
See `phi.dsl.Expression.Read`
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L1030-L1061
[ "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. 
We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. 
Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. """ branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. 
The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) **Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. 
Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Val
python
def Val(self, val, **kwargs): f = utils.lift(lambda z: val) return self.__then__(f, **kwargs)
The expression Val(a) is equivalent to the constant function lambda x: a All expressions in this module interpret values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it.
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L1181-L1203
[ "def lift(f):\n return lambda x, state: (f(x), state)\n", "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. 
We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. 
Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = 
self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. 
**Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ = _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ 
= _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.If
python
def If(self, condition, *then, **kwargs): cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr
**If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) )
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L1206-L1253
[ "def _parse(code):\n\n #if type(code) is tuple:\n if isinstance(code, Expression):\n return code\n elif hasattr(code, '__call__') or isclass(code):\n return Expression(utils.lift(code))\n elif isinstance(code, list):\n return E.List(*code)\n elif isinstance(code, tuple):\n return E.Tuple(*code)\n elif isinstance(code, set):\n return E.Set(*code)\n elif isinstance(code, dict):\n return E.Dict(**code)\n else:\n return E.Val(code)\n", "def _compile_if(ast):\n if hasattr(ast, \"__call__\"):\n return ast\n\n cond, then, Else = ast\n\n Else = _compile_if(Else)\n\n def g(x, state):\n y_cond, state = cond(x, state)\n\n return then(x, state) if y_cond else Else(x, state)\n\n return g\n", "def __then__(self, other, **kwargs):\n f = self._f\n g = other\n\n h = lambda x, state: g(*f(x, state))\n\n return self.__unit__(h, **kwargs)\n", " def Seq(self, *sequence, **kwargs):\n \"\"\"\n`Seq` is used to express function composition. The expression\n\n Seq(f, g)\n\nbe equivalent to\n\n lambda x: g(f(x))\n\nAs you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. 
You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted.\n\nIn general, the following rules apply for Seq:\n\n**General Sequence**\n\n Seq(f0, f1, ..., fn-1, fn)\n\nis equivalent to\n\n lambda x: fn(fn-1(...(f1(f0(x)))))\n\n**Single Function**\n\n Seq(f)\n\nis equivalent to\n\n f\n\n**Identity**\n\nThe empty Seq\n\n Seq()\n\nis equivalent to\n\n lambda x: x\n\n### Examples\n\n from phi import P, Seq\n\n f = Seq(\n P * 2,\n P + 1,\n P ** 2\n )\n\n assert f(1) == 9 # ((1 * 2) + 1) ** 2\n\nThe previous example using `P.Pipe`\n\n from phi import P\n\n assert 9 == P.Pipe(\n 1,\n P * 2, #1 * 2 == 2\n P + 1, #2 + 1 == 3\n P ** 2 #3 ** 2 == 9\n )\n \"\"\"\n fs = [ _parse(elem)._f for elem in sequence ]\n\n def g(x, state):\n return functools.reduce(lambda args, f: f(*args), fs, (x, state))\n\n return self.__then__(g, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. 
We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. 
Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def Else(self, *Else, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs) #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
cgarciae/phi
phi/dsl.py
Expression.Else
python
def Else(self, *Else, **kwargs): root = self._root ast = self._ast next_else = E.Seq(*Else)._f ast = _add_else(ast, next_else) g = _compile_if(ast) return root.__then__(g, **kwargs)
See `phi.dsl.Expression.If`
train
https://github.com/cgarciae/phi/blob/87fd7100a76f823232f4fd8360498b4b80675265/phi/dsl.py#L1255-L1265
[ "def _compile_if(ast):\n if hasattr(ast, \"__call__\"):\n return ast\n\n cond, then, Else = ast\n\n Else = _compile_if(Else)\n\n def g(x, state):\n y_cond, state = cond(x, state)\n\n return then(x, state) if y_cond else Else(x, state)\n\n return g\n", "def _add_else(ast, next_else):\n\n if hasattr(ast, \"__call__\"):\n return next_else\n\n cond, then, Else = ast\n\n return (cond, then, _add_else(Else, next_else))\n", " def Seq(self, *sequence, **kwargs):\n \"\"\"\n`Seq` is used to express function composition. The expression\n\n Seq(f, g)\n\nbe equivalent to\n\n lambda x: g(f(x))\n\nAs you see, its a little different from the mathematical definition. Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted.\n\nIn general, the following rules apply for Seq:\n\n**General Sequence**\n\n Seq(f0, f1, ..., fn-1, fn)\n\nis equivalent to\n\n lambda x: fn(fn-1(...(f1(f0(x)))))\n\n**Single Function**\n\n Seq(f)\n\nis equivalent to\n\n f\n\n**Identity**\n\nThe empty Seq\n\n Seq()\n\nis equivalent to\n\n lambda x: x\n\n### Examples\n\n from phi import P, Seq\n\n f = Seq(\n P * 2,\n P + 1,\n P ** 2\n )\n\n assert f(1) == 9 # ((1 * 2) + 1) ** 2\n\nThe previous example using `P.Pipe`\n\n from phi import P\n\n assert 9 == P.Pipe(\n 1,\n P * 2, #1 * 2 == 2\n P + 1, #2 + 1 == 3\n P ** 2 #3 ** 2 == 9\n )\n \"\"\"\n fs = [ _parse(elem)._f for elem in sequence ]\n\n def g(x, state):\n return functools.reduce(lambda args, f: f(*args), fs, (x, state))\n\n return self.__then__(g, **kwargs)\n" ]
class Expression(object): """ All elements of this language are callables (implement `__call__`) of arity 1. ** Examples ** Compiling a function just returns back the function Seq(f) == f and piping through a function is just the same a applying the function Pipe(x, f) == f(x) """ def __init__(self, f=utils.state_identity): self._f = f def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __hash__(self): return hash(self._f) def F(self, expr): return self >> expr def Pipe(self, *sequence, **kwargs): """ `Pipe` runs any `phi.dsl.Expression`. Its highly inspired by Elixir's [|> (pipe)](https://hexdocs.pm/elixir/Kernel.html#%7C%3E/2) operator. **Arguments** * ***sequence**: any variable amount of expressions. All expressions inside of `sequence` will be composed together using `phi.dsl.Expression.Seq`. * ****kwargs**: `Pipe` forwards all `kwargs` to `phi.builder.Builder.Seq`, visit its documentation for more info. The expression Pipe(*sequence, **kwargs) is equivalent to Seq(*sequence, **kwargs)(None) Normally the first argument or `Pipe` is a value, that is reinterpreted as a `phi.dsl.Expression.Val`, therfore, the input `None` is discarded. 
**Examples** from phi import P def add1(x): return x + 1 def mul3(x): return x * 3 x = P.Pipe( 1, #input add1, #1 + 1 == 2 mul3 #2 * 3 == 6 ) assert x == 6 The previous using [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) to create the functions from phi import P x = P.Pipe( 1, #input P + 1, #1 + 1 == 2 P * 3 #2 * 3 == 6 ) assert x == 6 **Also see** * `phi.builder.Builder.Seq` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) * [Compile](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Compile) * [lambdas](https://cgarciae.github.io/phi/lambdas.m.html) """ state = kwargs.pop("refs", {}) return self.Seq(*sequence, **kwargs)(None, **state) def ThenAt(self, n, f, *_args, **kwargs): """ `ThenAt` enables you to create a partially apply many arguments to a function, the returned partial expects a single arguments which will be applied at the `n`th position of the original function. **Arguments** * **n**: position at which the created partial will apply its awaited argument on the original function. * **f**: function which the partial will be created. * **_args & kwargs**: all `*_args` and `**kwargs` will be passed to the function `f`. * `_return_type = None`: type of the returned `builder`, if `None` it will return the same type of the current `builder`. This special kwarg will NOT be passed to `f`. You can think of `n` as the position that the value being piped down will pass through the `f`. Say you have the following expression D == fun(A, B, C) all the following are equivalent from phi import P, Pipe, ThenAt D == Pipe(A, ThenAt(1, fun, B, C)) D == Pipe(B, ThenAt(2, fun, A, C)) D == Pipe(C, ThenAt(3, fun, A, B)) you could also use the shortcuts `Then`, `Then2`,..., `Then5`, which are more readable from phi import P, Pipe D == Pipe(A, P.Then(fun, B, C)) D == Pipe(B, P.Then2(fun, A, C)) D == Pipe(C, P.Then3(fun, A, B)) There is a special case not discussed above: `n = 0`. 
When this happens only the arguments given will be applied to `f`, this method it will return a partial that expects a single argument but completely ignores it from phi import P D == Pipe(None, P.ThenAt(0, fun, A, B, C)) D == Pipe(None, P.Then0(fun, A, B, C)) **Examples** Max of 6 and the argument: from phi import P assert 6 == P.Pipe( 2, P.Then(max, 6) ) Previous is equivalent to assert 6 == max(2, 6) Open a file in read mode (`'r'`) from phi import P f = P.Pipe( "file.txt", P.Then(open, 'r') ) Previous is equivalent to f = open("file.txt", 'r') Split a string by whitespace and then get the length of each word from phi import P assert [5, 5, 5] == P.Pipe( "Again hello world", P.Then(str.split, ' ') .Then2(map, len) ) Previous is equivalent to x = "Again hello world" x = str.split(x, ' ') x = map(len, x) assert [5, 5, 5] == x As you see, `Then2` was very useful because `map` accepts and `iterable` as its `2nd` parameter. You can rewrite the previous using the [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) and the `phi.builder.Builder.Obj` object from phi import P, Obj assert [5, 5, 5] == P.Pipe( "Again hello world", Obj.split(' '), P.map(len) ) **Also see** * `phi.builder.Builder.Obj` * [PythonBuilder](https://cgarciae.github.io/phi/python_builder.m.html) * `phi.builder.Builder.RegisterAt` """ _return_type = None n_args = n - 1 if '_return_type' in kwargs: _return_type = kwargs['_return_type'] del kwargs['_return_type'] @utils.lift def g(x): new_args = _args[0:n_args] + (x,) + _args[n_args:] if n_args >= 0 else _args return f(*new_args, **kwargs) return self.__then__(g, _return_type=_return_type) def Then0(self, f, *args, **kwargs): """ `Then0(f, ...)` is equivalent to `ThenAt(0, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(0, f, *args, **kwargs) def Then(self, f, *args, **kwargs): """ `Then(f, ...)` is equivalent to `ThenAt(1, f, ...)`. 
Checkout `phi.builder.Builder.ThenAt` for more information. """ return self.ThenAt(1, f, *args, **kwargs) Then1 = Then def Then2(self, f, arg1, *args, **kwargs): """ `Then2(f, ...)` is equivalent to `ThenAt(2, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1,) + args return self.ThenAt(2, f, *args, **kwargs) def Then3(self, f, arg1, arg2, *args, **kwargs): """ `Then3(f, ...)` is equivalent to `ThenAt(3, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2) + args return self.ThenAt(3, f, *args, **kwargs) def Then4(self, f, arg1, arg2, arg3, *args, **kwargs): """ `Then4(f, ...)` is equivalent to `ThenAt(4, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3) + args return self.ThenAt(4, f, *args, **kwargs) def Then5(self, f, arg1, arg2, arg3, arg4, *args, **kwargs): """ `Then5(f, ...)` is equivalent to `ThenAt(5, f, ...)`. Checkout `phi.builder.Builder.ThenAt` for more information. """ args = (arg1, arg2, arg3, arg4) + args return self.ThenAt(5, f, *args, **kwargs) def List(self, *branches, **kwargs): """ While `Seq` is sequential, `phi.dsl.Expression.List` allows you to split the computation and get back a list with the result of each path. While the list literal should be the most incarnation of this expresion, it can actually be any iterable (implements `__iter__`) that is not a tuple and yields a valid expresion. The expression k = List(f, g) is equivalent to k = lambda x: [ f(x), g(x) ] In general, the following rules apply after compilation: **General Branching** List(f0, f1, ..., fn) is equivalent to lambda x: [ f0(x), f1(x), ..., fn(x) ] **Composing & Branching** It is interesting to see how braching interacts with composing. The expression Seq(f, List(g, h)) is *almost* equivalent to List( Seq(f, g), Seq(f, h) ) As you see its as if `f` where distributed over the List. 
We say *almost* because their implementation is different def _lambda(x): x = f(x) return [ g(x), h(x) ] vs lambda x: [ g(f(x)), h(f(x)) ] As you see `f` is only executed once in the first one. Both should yield the same result if `f` is a pure function. ### Examples form phi import P, List avg_word_length = P.Pipe( "1 22 333", lambda s: s.split(' '), # ['1', '22', '333'] lambda l: map(len, l), # [1, 2, 3] List( sum # 1 + 2 + 3 == 6 , len # len([1, 2, 3]) == 3 ), lambda l: l[0] / l[1] # sum / len == 6 / 3 == 2 ) assert avg_word_length == 2 The previous could also be done more briefly like this form phi import P, Obj, List avg_word_length = P.Pipe( "1 22 333", Obj .split(' ') # ['1', '22', '333'] .map(len) # [1, 2, 3] .List( sum #sum([1, 2, 3]) == 6 , len #len([1, 2, 3]) == 3 ), P[0] / P[1] #6 / 3 == 2 ) assert avg_word_length == 2 In the example above the last expression P[0] / P[1] works for a couple of reasons 1. The previous expression returns a list 2. In general the expression `P[x]` compiles to a function with the form `lambda obj: obj[x]` 3. The class `Expression` (the class from which the object `P` inherits) overrides most operators to create functions easily. For example, the expression (P * 2) / (P + 1) compile to a function of the form lambda x: (x * 2) / (x + 1) Check out the documentatio for Phi [lambdas](https://cgarciae.github.io/phi/lambdas.m.html). """ gs = [ _parse(code)._f for code in branches ] def h(x, state): ys = [] for g in gs: y, state = g(x, state) ys.append(y) return (ys, state) return self.__then__(h, **kwargs) def Tuple(self, *expressions, **kwargs): return self.List(*expressions) >> tuple def Set(self, *expressions, **kwargs): return self.List(*expressions) >> set def Seq(self, *sequence, **kwargs): """ `Seq` is used to express function composition. The expression Seq(f, g) be equivalent to lambda x: g(f(x)) As you see, its a little different from the mathematical definition. 
Excecution order flow from left to right, this makes reading and reasoning about code way more easy. This bahaviour is based upon the `|>` (pipe) operator found in languages like F#, Elixir and Elm. You can pack as many expressions as you like and they will be applied in order to the data that is passed through them when compiled an excecuted. In general, the following rules apply for Seq: **General Sequence** Seq(f0, f1, ..., fn-1, fn) is equivalent to lambda x: fn(fn-1(...(f1(f0(x))))) **Single Function** Seq(f) is equivalent to f **Identity** The empty Seq Seq() is equivalent to lambda x: x ### Examples from phi import P, Seq f = Seq( P * 2, P + 1, P ** 2 ) assert f(1) == 9 # ((1 * 2) + 1) ** 2 The previous example using `P.Pipe` from phi import P assert 9 == P.Pipe( 1, P * 2, #1 * 2 == 2 P + 1, #2 + 1 == 3 P ** 2 #3 ** 2 == 9 ) """ fs = [ _parse(elem)._f for elem in sequence ] def g(x, state): return functools.reduce(lambda args, f: f(*args), fs, (x, state)) return self.__then__(g, **kwargs) def Dict(self, **branches): gs = { key : _parse(value)._f for key, value in branches.items() } def h(x, state): ys = {} for key, g in gs.items(): y, state = g(x, state) ys[key] = y return _RecordObject(**ys), state return self.__then__(h) @property def Rec(self): """ `phi.dsl.Expression.List` provides you a way to branch the computation as a list, but access to the values of each branch are then done by index, this might be a little inconvenient because it reduces readability. `Rec` branches provide a way to create named branches via `Rec(**kwargs)` where the keys are the names of the branches and the values are valid expressions representing the computation of that branch. 
A special object is returned by this expression when excecuted, this object derives from `dict` and fully emulates it so you can treat it as such, however it also implements the `__getattr__` method, this lets you access a value as if it where a field ### Examples from phi import P, Rec stats = P.Pipe( [1,2,3], Rec( sum = sum , len = len ) ) assert stats.sum == 6 assert stats.len == 3 assert stats['sum'] == 6 assert stats['len'] == 3 Now lets image that we want to find the average value of the list, we could calculate it outside of the pipe doing something like `avg = stats.sum / stats.len`, however we could also do it inside the pipe using `Rec` field access lambdas from phi import P, Rec avg = P.Pipe( [1,2,3], Rec( sum = sum #6 , len = len #3 ), Rec.sum / Rec.len #6 / 3 == 2 ) assert avg == 2 """ return _RecordProxy(self) def With(self, context_manager, *body, **kwargs): """ **With** def With(context_manager, *body): **Arguments** * **context_manager**: a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) object or valid expression from the DSL that returns a context manager. * ***body**: any valid expression of the DSL to be evaluated inside the context. `*body` is interpreted as a tuple so all expression contained are composed. As with normal python programs you sometimes might want to create a context for a block of code. You normally give a [context manager](https://docs.python.org/2/reference/datamodel.html#context-managers) to the [with](https://docs.python.org/2/reference/compound_stmts.html#the-with-statement) statemente, in Phi you use `P.With` or `phi.With` **Context** Python's `with` statemente returns a context object through `as` keyword, in the DSL this object can be obtained using the `P.Context` method or the `phi.Context` function. 
### Examples from phi import P, Obj, Context, With, Pipe text = Pipe( "text.txt", With( open, Context, Obj.read() ) ) The previous is equivalent to with open("text.txt") as f: text = f.read() """ context_f = _parse(context_manager)._f body_f = E.Seq(*body)._f def g(x, state): context, state = context_f(x, state) with context as scope: with _WithContextManager(scope): return body_f(x, state) return self.__then__(g, **kwargs) @property def Read(self): """ Giving names and saving parts of your computation to use then latter is useful to say the least. In Phi the expression Write(x = expr) creates a reference `x` given the value of `expr` which you can call latter. To read the previous you would use any of the following expressions Read('x') Read.x ### Example Lets see a common situation where you would use this from phi import P, List, Seq, Read, Write result = P.Pipe( input, Write(ref = f1), f2, List( f3 , Seq( Read('ref'), f4 ) ) ) Here you *save* the value outputed by `fun_1` and the load it as the initial value of the second branch. In normal python the previous would be *almost* equivalent to x = f1(input) ref = x x = f2(x) result = [ f3(x) , f4(ref) ] """ return _ReadProxy(self) def ReadList(self, *branches, **kwargs): """ Same as `phi.dsl.Expression.List` but any string argument `x` is translated to `Read(x)`. 
""" branches = map(lambda x: E.Read(x) if isinstance(x, str) else x, branches) return self.List(*branches, **kwargs) def Write(self, *state_args, **state_dict): """See `phi.dsl.Expression.Read`""" if len(state_dict) + len(state_args) < 1: raise Exception("Please include at-least 1 state variable, got {0} and {1}".format(state_args, state_dict)) if len(state_dict) > 1: raise Exception("Please include at-most 1 keyword argument expression, got {0}".format(state_dict)) if len(state_dict) > 0: state_key = next(iter(state_dict.keys())) write_expr = state_dict[state_key] state_args += (state_key,) expr = self >> write_expr else: expr = self def g(x, state): update = { key: x for key in state_args } state = utils.merge(state, update) #side effect for convenience _StateContextManager.REFS.update(state) return x, state return expr.__then__(g) @property def Rec(self): """ `Rec` is a `property` that returns an object that defines the `__getattr__` and `__getitem__` methods which when called help you create lambdas that emulates a field access. The following expression Rec.some_field is equivalent to lambda rec: rec.some_field **Examples** from phi import P, Obj, Rec class Point(object): def __init__(self, x, y): self.x = x self.y = y def flip_cords(self): y = self.y self.y = self.x self.x = y assert 4 == P.Pipe( Point(1, 2), # point(x=1, y=2) Obj.flip_cords(), # point(x=2, y=1) Rec.x, # point.x = 2 P * 2 # 2 * 2 = 4 ) **Also see** * `phi.builder.Builder.Obj` * `phi.builder.Builder.Read` * `phi.builder.Builder.Write` """ return _RecordProxy(self) @property def Obj(self): """ `Obj` is a `property` that returns an object that defines the `__getattr__` method which when called helps you create a partial that emulates a method call. The following expression Obj.some_method(x1, x2, ...) is equivalent to lambda obj: obj.some_method(x1, x2, ...) 
**Examples** from phi import P, Obj assert "hello world" == P.Pipe( " HELLO HELLO {0} ", Obj.format("WORLD"), # " HELLO HELLO WORLD " Obj.strip(), # "HELLO HELLO WORLD" Obj.lower() # "hello hello world" Obj.split(' ') # ["hello", "hello", "world"] Obj.count("hello") # 2 ) **Also see** * `phi.builder.Builder.Rec` * [dsl.Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) * `phi.builder.Builder.Write` """ return _ObjectProxy(self) @property def Ref(self): """ Returns an object that helps you to inmediatly create and [read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) [references](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref). **Creating Refences** You can manually create a [Ref](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Ref) outside the DSL using `Ref` and then pass to as/to a [Read](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Read) or [Write](https://cgarciae.github.io/phi/dsl.m.html#phi.dsl.Write) expression. Here is a contrived example from phi import P r = P.Ref('r') assert [600, 3, 6] == P.Pipe( 2, P + 1, {'a'}, # a = 2 + 1 = 3 P * 2, {'b'}, # b = 3 * 2 = 6 P * 100, {'c', r }, # c = r = 6 * 100 = 600 ['c', 'a', 'b'] ) assert r() == 600 **Reading Refences from the Current Context** While the expression `Read.a` with return a function that will discard its argument and return the value of the reference `x` in the current context, the expression `Ref.x` will return the value inmediatly, this is useful when using it inside pyton lambdas. 
Read.x(None) <=> Ref.x As an example from phi import P, Obj, Ref assert {'a': 97, 'b': 98, 'c': 99} == P.Pipe( "a b c", Obj .split(' ').Write.keys # keys = ['a', 'b', 'c'] .map(ord), # [ord('a'), ord('b'), ord('c')] == [97, 98, 99] lambda it: zip(Ref.keys, it), # [('a', 97), ('b', 98), ('c', 99)] dict # {'a': 97, 'b': 98, 'c': 99} ) """ return _RefProxyInstance def Val(self, val, **kwargs): """ The expression Val(a) is equivalent to the constant function lambda x: a All expression in this module interprete values that are not functions as constant functions using `Val`, for example Seq(1, P + 1) is equivalent to Seq(Val(1), P + 1) The previous expression as a whole is a constant function since it will return `2` no matter what input you give it. """ f = utils.lift(lambda z: val) return self.__then__(f, **kwargs) def If(self, condition, *then, **kwargs): """ **If** If(Predicate, *Then) Having conditionals expressions a necesity in every language, Phi includes the `If` expression for such a purpose. **Arguments** * **Predicate** : a predicate expression uses to determine if the `Then` or `Else` branches should be used. * ***Then** : an expression to be excecuted if the `Predicate` yields `True`, since this parameter is variadic you can stack expression and they will be interpreted as a tuple `phi.dsl.Seq`. 
This class also includes the `Elif` and `Else` methods which let you write branched conditionals in sequence, however the following rules apply * If no branch is entered the whole expression behaves like the identity * `Elif` can only be used after an `If` or another `Elif` expression * Many `Elif` expressions can be stacked sequentially * `Else` can only be used after an `If` or `Elif` expression ** Examples ** from phi import P, If assert "Between 2 and 10" == P.Pipe( 5, If(P > 10, "Greater than 10" ).Elif(P < 2, "Less than 2" ).Else( "Between 2 and 10" ) ) """ cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity ast = (cond_f, then_f, else_f) g = _compile_if(ast) expr = self.__then__(g, **kwargs) expr._ast = ast expr._root = self return expr #Else.__doc__ = If.__doc__ def Elif(self, condition, *then, **kwargs): """See `phi.dsl.Expression.If`""" root = self._root ast = self._ast cond_f = _parse(condition)._f then_f = E.Seq(*then)._f else_f = utils.state_identity next_else = (cond_f, then_f, else_f) ast = _add_else(ast, next_else) g = _compile_if(ast) expr = root.__then__(g, **kwargs) expr._ast = ast expr._root = root return expr #Elif.__doc__ = If.__doc__ @staticmethod def Context(*args): """ **Builder Core**. Also available as a global function as `phi.Context`. Returns the context object of the current `dsl.With` statemente. **Arguments** * ***args**: By design `Context` accepts any number of arguments and completely ignores them. 
This is a classmethod and it doesnt return a `Builder`/`Expression` by design so it can be called directly: from phi import P, Context, Obj def read_file(z): f = Context() return f.read() lines = P.Pipe( "text.txt", P.With( open, read_file, Obj.split("\\n") ) ) Here we called `Context` with no arguments to get the context back, however, since you can also give this function an argument (which it will ignore) it can be passed to the DSL so we can rewrite the previous as: from phi import P, Context, Obj lines = P.Pipe( "text.txt", P.With( open, Context, # f Obj.read() Obj.split("\\n") ) ) `Context` yields an exception when used outside of a `With` block. **Also see** * `phi.builder.Builder.Obj` * [dsl](https://cgarciae.github.io/phi/dsl.m.html) """ if _WithContextManager.WITH_GLOBAL_CONTEXT is utils.NO_VALUE: raise Exception("Cannot use 'Context' outside of a 'With' block") return _WithContextManager.WITH_GLOBAL_CONTEXT ############### ## Operators ############### def __rshift__(self, other): f = _parse(other)._f return self.__then__(f) def __rrshift__(self, prev): prev = _parse(prev) return prev.__then__(self._f) __rlshift__ = __rshift__ __lshift__ = __rrshift__ ## The Rest def __unit__(self, f, _return_type=None): "Monadic unit, also known as `return`" if _return_type: return _return_type(f) else: return self.__class__(f) def __then__(self, other, **kwargs): f = self._f g = other h = lambda x, state: g(*f(x, state)) return self.__unit__(h, **kwargs) ## Override operators def __call__(self, __x__, *__return_state__, **state): x = __x__ return_state = __return_state__ if len(return_state) == 1 and type(return_state[0]) is not bool: raise Exception("Invalid return state condition, got {return_state}".format(return_state=return_state)) with _StateContextManager(state): y, next_state = self._f(x, state) return (y, next_state) if len(return_state) >= 1 and return_state[0] else y def __getitem__(self, key): f = utils.lift(lambda x: x[key]) return self.__then__(f) __add__ 
= _fmap(operator.add) __mul__ = _fmap(operator.mul) __sub__ = _fmap(operator.sub) __mod__ = _fmap(operator.mod) __pow__ = _fmap(operator.pow) __and__ = _fmap(operator.and_) __or__ = _fmap(operator.or_) __xor__ = _fmap(operator.xor) __div__ = _fmap(operator.truediv) __divmod__ = _fmap(divmod) __floordiv__ = _fmap(operator.floordiv) __truediv__ = _fmap(operator.truediv) __contains__ = _fmap(operator.contains) __lt__ = _fmap(operator.lt) __le__ = _fmap(operator.le) __gt__ = _fmap(operator.gt) __ge__ = _fmap(operator.ge) __eq__ = _fmap(operator.eq) __ne__ = _fmap(operator.ne) __neg__ = _unary_fmap(operator.neg) __pos__ = _unary_fmap(operator.pos) __invert__ = _unary_fmap(operator.invert) __radd__ = _fmap_flip(operator.add) __rmul__ = _fmap_flip(operator.mul) __rsub__ = _fmap_flip(operator.sub) __rmod__ = _fmap_flip(operator.mod) __rpow__ = _fmap_flip(operator.pow) __rdiv__ = _fmap_flip(operator.truediv) __rdivmod__ = _fmap_flip(divmod) __rtruediv__ = _fmap_flip(operator.truediv) __rfloordiv__ = _fmap_flip(operator.floordiv) __rand__ = _fmap_flip(operator.and_) __ror__ = _fmap_flip(operator.or_) __rxor__ = _fmap_flip(operator.xor)
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
preservesurrogates
python
def preservesurrogates(s):
    """
    Split a string into a list of characters, preserving surrogate pairs.

    On narrow Python 2 builds, Unicode characters above U+FFFF are stored
    as UTF-16 surrogate pairs; naive iteration (``list(s)``) would split
    such a pair into two half-characters.

    :param s: Unicode string to split.
    :return: List of single characters, with each surrogate pair kept
             together as one two-code-unit element.
    :raises TypeError: if ``s`` is not a unicode (text) string.
    """
    if not isinstance(s, six.text_type):
        raise TypeError(u"String to split must be of type 'unicode'!")
    # A high surrogate (U+D800-U+DBFF) immediately followed by a low
    # surrogate (U+DC00-U+DFFF) forms one logical character.
    surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START,
                                                        HIGH_SURROGATE_END,
                                                        LOW_SURROGATE_START,
                                                        LOW_SURROGATE_END)
    # Match a full surrogate pair first, falling back to any single
    # character.  re.DOTALL is required so the "." fallback also matches
    # newlines; without it, "\n" characters were silently dropped from
    # the result.
    surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str),
                                  re.DOTALL)
    return surrogates_regex.findall(s)
Function for splitting a string into a list of characters, preserving surrogate pairs. In Python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', u'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L23-L46
null
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def _unichr(i): """ Helper function for taking a Unicode scalar value and returning a Unicode character. :param s: Unicode scalar value to convert. :return: Unicode character """ if not isinstance(i, int): raise TypeError try: return six.unichr(i) except ValueError: # Workaround the error "ValueError: unichr() arg not in range(0x10000) (narrow Python build)" return struct.pack("i", i).decode("utf-32") def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. :param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _padded_hex(i, pad_width=4, uppercase=True): """ Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: (int specifying the minimum width of the output string. String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer. """ result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width) def _uax44lm2transform(s): """ Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. 
For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule. """ result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. 
medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower() def _to_unicode_scalar_value(s): """ Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return: """ if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. """ for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False def _get_nr_prefix(i): """ Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix. 
""" for lookup_range, prefix_string in _nr_prefix_strings.items(): if i in lookup_range: return prefix_string raise ValueError("No prefix string associated with {0}!".format(i)) #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). 
:param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap() def casefold(s, fullcasefold=True, useturkicmapping=False): """ Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison. 
""" if not isinstance(s, six.text_type): raise TypeError(u"String to casefold must be of type 'unicode'!") lookup_order = "CF" if not fullcasefold: lookup_order = "CS" if useturkicmapping: lookup_order = "T" + lookup_order return u"".join([casefold_map.lookup(c, lookup_order=lookup_order) for c in preservesurrogates(s)])
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
_unichr
python
def _unichr(i):
    # Convert the Unicode scalar value *i* into the corresponding
    # one-character text string.
    #
    # :param i: integer Unicode scalar value to convert
    # :return: the Unicode character for *i*
    # :raises TypeError: if *i* is not an int
    if not isinstance(i, int):
        raise TypeError
    try:
        result = six.unichr(i)
    except ValueError:
        # Narrow Python 2 builds cannot represent code points above 0xFFFF
        # with unichr() ("ValueError: unichr() arg not in range(0x10000)"),
        # so build the character by decoding its UTF-32 byte representation.
        result = struct.pack("i", i).decode("utf-32")
    return result
Helper function for taking a Unicode scalar value and returning a Unicode character. :param i: Unicode scalar value to convert. :return: Unicode character
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L49-L62
null
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def preservesurrogates(s): """ Function for splitting a string into a list of characters, preserving surrogate pairs. In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', 'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters """ if not isinstance(s, six.text_type): raise TypeError(u"String to split must be of type 'unicode'!") surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START, HIGH_SURROGATE_END, LOW_SURROGATE_START, LOW_SURROGATE_END) surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str)) return surrogates_regex.findall(s) def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. :param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _padded_hex(i, pad_width=4, uppercase=True): """ Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: (int specifying the minimum width of the output string. 
String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer. """ result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width) def _uax44lm2transform(s): """ Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule. 
""" result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower() def _to_unicode_scalar_value(s): """ Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return: """ if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. """ for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False def _get_nr_prefix(i): """ Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix. 
""" for lookup_range, prefix_string in _nr_prefix_strings.items(): if i in lookup_range: return prefix_string raise ValueError("No prefix string associated with {0}!".format(i)) #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). 
:param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap() def casefold(s, fullcasefold=True, useturkicmapping=False): """ Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison. 
""" if not isinstance(s, six.text_type): raise TypeError(u"String to casefold must be of type 'unicode'!") lookup_order = "CF" if not fullcasefold: lookup_order = "CS" if useturkicmapping: lookup_order = "T" + lookup_order return u"".join([casefold_map.lookup(c, lookup_order=lookup_order) for c in preservesurrogates(s)])
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
_padded_hex
python
def _padded_hex(i, pad_width=4, uppercase=True): result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width)
Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: int specifying the minimum width of the output string. String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L75-L91
null
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def preservesurrogates(s): """ Function for splitting a string into a list of characters, preserving surrogate pairs. In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', 'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters """ if not isinstance(s, six.text_type): raise TypeError(u"String to split must be of type 'unicode'!") surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START, HIGH_SURROGATE_END, LOW_SURROGATE_START, LOW_SURROGATE_END) surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str)) return surrogates_regex.findall(s) def _unichr(i): """ Helper function for taking a Unicode scalar value and returning a Unicode character. :param s: Unicode scalar value to convert. :return: Unicode character """ if not isinstance(i, int): raise TypeError try: return six.unichr(i) except ValueError: # Workaround the error "ValueError: unichr() arg not in range(0x10000) (narrow Python build)" return struct.pack("i", i).decode("utf-32") def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. 
:param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _uax44lm2transform(s): """ Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule. """ result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. 
medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower() def _to_unicode_scalar_value(s): """ Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return: """ if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. """ for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False def _get_nr_prefix(i): """ Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix. 
""" for lookup_range, prefix_string in _nr_prefix_strings.items(): if i in lookup_range: return prefix_string raise ValueError("No prefix string associated with {0}!".format(i)) #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). 
:param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap() def casefold(s, fullcasefold=True, useturkicmapping=False): """ Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison. 
""" if not isinstance(s, six.text_type): raise TypeError(u"String to casefold must be of type 'unicode'!") lookup_order = "CF" if not fullcasefold: lookup_order = "CS" if useturkicmapping: lookup_order = "T" + lookup_order return u"".join([casefold_map.lookup(c, lookup_order=lookup_order) for c in preservesurrogates(s)])
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
_uax44lm2transform
python
def _uax44lm2transform(s): result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower()
Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L94-L135
null
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def preservesurrogates(s): """ Function for splitting a string into a list of characters, preserving surrogate pairs. In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', 'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters """ if not isinstance(s, six.text_type): raise TypeError(u"String to split must be of type 'unicode'!") surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START, HIGH_SURROGATE_END, LOW_SURROGATE_START, LOW_SURROGATE_END) surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str)) return surrogates_regex.findall(s) def _unichr(i): """ Helper function for taking a Unicode scalar value and returning a Unicode character. :param s: Unicode scalar value to convert. :return: Unicode character """ if not isinstance(i, int): raise TypeError try: return six.unichr(i) except ValueError: # Workaround the error "ValueError: unichr() arg not in range(0x10000) (narrow Python build)" return struct.pack("i", i).decode("utf-32") def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. 
:param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _padded_hex(i, pad_width=4, uppercase=True): """ Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: (int specifying the minimum width of the output string. String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer. """ result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width) def _to_unicode_scalar_value(s): """ Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return: """ if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. """ for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False def _get_nr_prefix(i): """ Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix. 
""" for lookup_range, prefix_string in _nr_prefix_strings.items(): if i in lookup_range: return prefix_string raise ValueError("No prefix string associated with {0}!".format(i)) #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). 
:param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap() def casefold(s, fullcasefold=True, useturkicmapping=False): """ Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison. 
""" if not isinstance(s, six.text_type): raise TypeError(u"String to casefold must be of type 'unicode'!") lookup_order = "CF" if not fullcasefold: lookup_order = "CS" if useturkicmapping: lookup_order = "T" + lookup_order return u"".join([casefold_map.lookup(c, lookup_order=lookup_order) for c in preservesurrogates(s)])
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
_to_unicode_scalar_value
python
def _to_unicode_scalar_value(s): if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError
Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return:
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L138-L162
null
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def preservesurrogates(s): """ Function for splitting a string into a list of characters, preserving surrogate pairs. In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', 'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters """ if not isinstance(s, six.text_type): raise TypeError(u"String to split must be of type 'unicode'!") surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START, HIGH_SURROGATE_END, LOW_SURROGATE_START, LOW_SURROGATE_END) surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str)) return surrogates_regex.findall(s) def _unichr(i): """ Helper function for taking a Unicode scalar value and returning a Unicode character. :param s: Unicode scalar value to convert. :return: Unicode character """ if not isinstance(i, int): raise TypeError try: return six.unichr(i) except ValueError: # Workaround the error "ValueError: unichr() arg not in range(0x10000) (narrow Python build)" return struct.pack("i", i).decode("utf-32") def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. 
:param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _padded_hex(i, pad_width=4, uppercase=True): """ Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: (int specifying the minimum width of the output string. String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer. """ result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width) def _uax44lm2transform(s): """ Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. 
Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule. """ result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower() #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. """ for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False def _get_nr_prefix(i): """ Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix. 
""" for lookup_range, prefix_string in _nr_prefix_strings.items(): if i in lookup_range: return prefix_string raise ValueError("No prefix string associated with {0}!".format(i)) #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). 
:param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap() def casefold(s, fullcasefold=True, useturkicmapping=False): """ Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison. 
""" if not isinstance(s, six.text_type): raise TypeError(u"String to casefold must be of type 'unicode'!") lookup_order = "CF" if not fullcasefold: lookup_order = "CS" if useturkicmapping: lookup_order = "T" + lookup_order return u"".join([casefold_map.lookup(c, lookup_order=lookup_order) for c in preservesurrogates(s)])
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
_get_nr_prefix
python
def _get_nr_prefix(i):
    """
    Helper function for looking up the derived name prefix associated with a
    Unicode scalar value.

    :param i: Unicode scalar value.
    :return: String with the derived name prefix.
    :raises ValueError: if the value falls in none of the derived-name ranges.
    """
    for code_range in _nr_prefix_strings:
        if i in code_range:
            return _nr_prefix_strings[code_range]
    raise ValueError("No prefix string associated with {0}!".format(i))
Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L198-L208
null
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def preservesurrogates(s): """ Function for splitting a string into a list of characters, preserving surrogate pairs. In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', 'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters """ if not isinstance(s, six.text_type): raise TypeError(u"String to split must be of type 'unicode'!") surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START, HIGH_SURROGATE_END, LOW_SURROGATE_START, LOW_SURROGATE_END) surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str)) return surrogates_regex.findall(s) def _unichr(i): """ Helper function for taking a Unicode scalar value and returning a Unicode character. :param s: Unicode scalar value to convert. :return: Unicode character """ if not isinstance(i, int): raise TypeError try: return six.unichr(i) except ValueError: # Workaround the error "ValueError: unichr() arg not in range(0x10000) (narrow Python build)" return struct.pack("i", i).decode("utf-32") def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. 
:param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _padded_hex(i, pad_width=4, uppercase=True): """ Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: (int specifying the minimum width of the output string. String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer. """ result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width) def _uax44lm2transform(s): """ Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. 
Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule. """ result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower() def _to_unicode_scalar_value(s): """ Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return: """ if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. 
""" for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). 
:param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap() def casefold(s, fullcasefold=True, useturkicmapping=False): """ Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison. 
""" if not isinstance(s, six.text_type): raise TypeError(u"String to casefold must be of type 'unicode'!") lookup_order = "CF" if not fullcasefold: lookup_order = "CS" if useturkicmapping: lookup_order = "T" + lookup_order return u"".join([casefold_map.lookup(c, lookup_order=lookup_order) for c in preservesurrogates(s)])
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
casefold
python
def casefold(s, fullcasefold=True, useturkicmapping=False):
    """Return a copy of *s* transformed for caseless comparison.

    Performs Unicode case folding per section 3.13 of the Unicode Standard,
    using the mappings loaded from CaseFolding.txt.  By default a full case
    fold is done (string length may change); pass ``fullcasefold=False`` for
    simple (single-character) folding.  Pass ``useturkicmapping=True`` to
    apply the special Turkic mappings for dotted/dotless 'i'.

    :param s: String to transform; must be of type 'unicode'.
    :param fullcasefold: Boolean selecting full (True, default) or simple
                         (False) case folding.
    :param useturkicmapping: Boolean selecting the special Turkic mappings
                             (default False).
    :return: Copy of string that has been transformed for caseless comparison.
    :raises TypeError: if *s* is not of type 'unicode'.
    """
    if not isinstance(s, six.text_type):
        raise TypeError(u"String to casefold must be of type 'unicode'!")
    # "C"ommon + "F"ull tables for a full fold, "C" + "S"imple otherwise;
    # the "T"urkic table, when requested, is consulted first.
    lookup_order = "CF" if fullcasefold else "CS"
    if useturkicmapping:
        lookup_order = "T" + lookup_order
    folded = (casefold_map.lookup(c, lookup_order=lookup_order)
              for c in preservesurrogates(s))
    return u"".join(folded)
Function for performing case folding. This function will take the input string s and return a copy of the string suitable for caseless comparisons. The input string must be of type 'unicode', otherwise a TypeError will be raised. For more information on case folding, see section 3.13 of the Unicode Standard. See also the following FAQ on the Unicode website: https://unicode.org/faq/casemap_charprop.htm By default, full case folding (where the string length may change) is done. It is possible to use simple case folding (single character mappings only) by setting the boolean parameter fullcasefold=False. By default, case folding does not handle the Turkic case of dotted vs dotless 'i'. To perform case folding using the special Turkic mappings, pass the boolean parameter useturkicmapping=True. For more info on the dotted vs dotless 'i', see the following web pages: https://en.wikipedia.org/wiki/Dotted_and_dotless_I http://www.i18nguy.com/unicode/turkish-i18n.html#problem :param s: String to transform :param fullcasefold: Boolean indicating if a full case fold (default is True) should be done. If False, a simple case fold will be performed. :param useturkicmapping: Boolean indicating if the special turkic mapping (default is False) for the dotted and dotless 'i' should be used. :return: Copy of string that has been transformed for caseless comparison.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L522-L560
[ "def preservesurrogates(s):\n \"\"\"\n Function for splitting a string into a list of characters, preserving surrogate pairs.\n\n In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character\n u\"\\U0001e900\" is stored as the surrogate pair u\"\\ud83a\\udd00\":\n\n s = u\"AB\\U0001e900CD\"\n len(s) -> 6\n list(s) -> [u'A', u'B', u'\\ud83a', u'\\udd00', u'C', 'D']\n len(preservesurrogates(s)) -> 5\n list(preservesurrogates(s)) -> [u'A', u'B', u'\\U0001e900', u'C', u'D']\n\n :param s: String to split\n :return: List of characters\n \"\"\"\n if not isinstance(s, six.text_type):\n raise TypeError(u\"String to split must be of type 'unicode'!\")\n surrogates_regex_str = u\"[{0}-{1}][{2}-{3}]\".format(HIGH_SURROGATE_START,\n HIGH_SURROGATE_END,\n LOW_SURROGATE_START,\n LOW_SURROGATE_END)\n surrogates_regex = re.compile(u\"(?:{0})|.\".format(surrogates_regex_str))\n return surrogates_regex.findall(s)\n" ]
from collections import defaultdict, namedtuple, OrderedDict from fractions import Fraction import codecs import os import re import struct import six try: # Python 2 style import from hangulutil import _get_hangul_syllable_name except ImportError: # Python 3 style import from .hangulutil import _get_hangul_syllable_name #: Ranges of surrogate pairs HIGH_SURROGATE_START = u"\ud800" HIGH_SURROGATE_END = u"\udbff" LOW_SURROGATE_START = u"\udc00" LOW_SURROGATE_END = u"\udfff" def preservesurrogates(s): """ Function for splitting a string into a list of characters, preserving surrogate pairs. In python 2, unicode characters above 0x10000 are stored as surrogate pairs. For example, the Unicode character u"\U0001e900" is stored as the surrogate pair u"\ud83a\udd00": s = u"AB\U0001e900CD" len(s) -> 6 list(s) -> [u'A', u'B', u'\ud83a', u'\udd00', u'C', 'D'] len(preservesurrogates(s)) -> 5 list(preservesurrogates(s)) -> [u'A', u'B', u'\U0001e900', u'C', u'D'] :param s: String to split :return: List of characters """ if not isinstance(s, six.text_type): raise TypeError(u"String to split must be of type 'unicode'!") surrogates_regex_str = u"[{0}-{1}][{2}-{3}]".format(HIGH_SURROGATE_START, HIGH_SURROGATE_END, LOW_SURROGATE_START, LOW_SURROGATE_END) surrogates_regex = re.compile(u"(?:{0})|.".format(surrogates_regex_str)) return surrogates_regex.findall(s) def _unichr(i): """ Helper function for taking a Unicode scalar value and returning a Unicode character. :param s: Unicode scalar value to convert. :return: Unicode character """ if not isinstance(i, int): raise TypeError try: return six.unichr(i) except ValueError: # Workaround the error "ValueError: unichr() arg not in range(0x10000) (narrow Python build)" return struct.pack("i", i).decode("utf-32") def _hexstr_to_unichr(s): """ Helper function for taking a hex string and returning a Unicode character. 
:param s: hex string to convert :return: Unicode character """ return _unichr(int(s, 16)) def _padded_hex(i, pad_width=4, uppercase=True): """ Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes until the string is of the specified width. For example: _padded_hex(31, pad_width=4, uppercase=True) -> "001F" :param i: integer to convert to a hex string :param pad_width: (int specifying the minimum width of the output string. String will be padded on the left with '0' as needed. :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True). :return: Hex string representation of the input integer. """ result = hex(i)[2:] # Remove the leading "0x" if uppercase: result = result.upper() return result.zfill(pad_width) def _uax44lm2transform(s): """ Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>. The rule is defined as follows: "UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in U+1180 HANGUL JUNGSEONG O-E." Therefore, correctly implementing the rule involves performing the following three operations, in order: 1. remove all medial hyphens (except the medial hyphen in the name for U+1180) 2. remove all whitespace and underscore characters 3. apply toLowercase() to both strings A "medial hyphen" is defined as follows (quoted from the above referenced web page): "In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently occur medially as a result of removing whitespace before removing hyphens in a particular implementation of matching. 
Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in loose matching." :param s: String to transform :return: String transformed per UAX44-LM2 loose matching rule. """ result = s # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the # lookbehind assertion (?<=\w)) and immediately after (the lookahead assertion (?=\w)) the hyphen, per the "medial # hyphen" definition that it is a hyphen occurring immediately between two letters. medialhyphen = re.compile(r"(?<=\w)-(?=\w)") whitespaceunderscore = re.compile(r"[\s_]", re.UNICODE) # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name if result != "HANGUL JUNGSEONG O-E": result = medialhyphen.sub("", result) result = whitespaceunderscore.sub("", result) return result.lower() def _to_unicode_scalar_value(s): """ Helper function for converting a character or surrogate pair into a Unicode scalar value e.g. "\ud800\udc00" -> 0x10000 The algorithm can be found in older versions of the Unicode Standard. https://unicode.org/versions/Unicode3.0.0/ch03.pdf, Section 3.7, D28 Unicode scalar value: a number N from 0 to 0x10FFFF is defined by applying the following algorithm to a character sequence S: If S is a single, non-surrogate value U: N = U If S is a surrogate pair H, L: N = (H - 0xD800) * 0x0400 + (L - 0xDC00) + 0x10000 :param s: :return: """ if len(s) == 1: return ord(s) elif len(s) == 2: return (ord(s[0]) - 0xD800) * 0x0400 + (ord(s[1]) - 0xDC00) + 0x10000 else: raise ValueError #: Dictionary for looking up the prefixes for derived names. #: See Unicode Standard section 4.8 and table 4-8 for more information on the name derivation rules NR1 and NR2. 
#: https://www.unicode.org/versions/Unicode10.0.0/ch04.pdf _nr_prefix_strings = { six.moves.range( 0xAC00, 0xD7A3 + 1): "HANGUL SYLLABLE ", six.moves.range( 0x3400, 0x4DB5 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range( 0x4E00, 0x9FEA + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x20000, 0x2A6D6 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2A700, 0x2B734 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B740, 0x2B81D + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2B820, 0x2CEA1 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x2CEB0, 0x2EBE0 + 1): "CJK UNIFIED IDEOGRAPH-", six.moves.range(0x17000, 0x187EC + 1): "TANGUT IDEOGRAPH-", six.moves.range(0x1B170, 0x1B2FB + 1): "NUSHU CHARACTER-", six.moves.range( 0xF900, 0xFA6D + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range( 0xFA70, 0xFAD9 + 1): "CJK COMPATIBILITY IDEOGRAPH-", six.moves.range(0x2F800, 0x2FA1D + 1): "CJK COMPATIBILITY IDEOGRAPH-" } def _is_derived(i): """ Helper function for determining if a Unicode scalar value falls into one of the ranges of derived names. :param i: Unicode scalar value. :return: Boolean. True if the value is in one of the derived ranges. False otherwise. """ for lookup_range in _nr_prefix_strings.keys(): if i in lookup_range: return True return False def _get_nr_prefix(i): """ Helper function for looking up the derived name prefix associated with a Unicode scalar value. :param i: Unicode scalar value. :return: String with the derived name prefix. 
""" for lookup_range, prefix_string in _nr_prefix_strings.items(): if i in lookup_range: return prefix_string raise ValueError("No prefix string associated with {0}!".format(i)) #: Documentation on the fields of UnicodeData.txt: #: https://www.unicode.org/L2/L1999/UnicodeData.html #: https://www.unicode.org/reports/tr44/#UnicodeData.txt UnicodeCharacter = namedtuple("UnicodeCharacter", ["code", "name", "category", "combining", "bidi", "decomposition", "decimal", "digit", "numeric", "mirrored", "unicode_1_name", "iso_comment", "uppercase", "lowercase", "titlecase"]) class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. 
data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. 
""" return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. 
""" try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def _load_unicode_block_info(self): """ Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. 
For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. 
""" return self.__getitem__(_to_unicode_scalar_value(c)) class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). :param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c casefold_map = CaseFoldingMap()
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
UnicodeData._build_unicode_character_database
python
def _build_unicode_character_database(self): filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 
4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data
Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L228-L278
[ "def _get_hangul_syllable_name(hangul_syllable):\n \"\"\"\n Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as\n defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information.\n\n :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert\n :return: String representing its syllable name as transformed according to naming rule NR1.\n \"\"\"\n if not _is_hangul_syllable(hangul_syllable):\n raise ValueError(\"Value passed in does not represent a Hangul syllable!\")\n jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True)\n result = ''\n for j in jamo:\n if j is not None:\n result += _get_jamo_short_name(j)\n return result\n", "def _padded_hex(i, pad_width=4, uppercase=True):\n \"\"\"\n Helper function for taking an integer and returning a hex string. The string will be padded on the left with zeroes\n until the string is of the specified width. For example:\n\n _padded_hex(31, pad_width=4, uppercase=True) -> \"001F\"\n\n :param i: integer to convert to a hex string\n :param pad_width: (int specifying the minimum width of the output string. String will be padded on the left with '0'\n as needed.\n :param uppercase: Boolean indicating if we should use uppercase characters in the output string (default=True).\n :return: Hex string representation of the input integer.\n \"\"\"\n result = hex(i)[2:] # Remove the leading \"0x\"\n if uppercase:\n result = result.upper()\n return result.zfill(pad_width)\n", "def _uax44lm2transform(s):\n \"\"\"\n Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching\n rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>.\n\n The rule is defined as follows:\n\n \"UAX44-LM2. 
Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in\n U+1180 HANGUL JUNGSEONG O-E.\"\n\n Therefore, correctly implementing the rule involves performing the following three operations, in order:\n\n 1. remove all medial hyphens (except the medial hyphen in the name for U+1180)\n 2. remove all whitespace and underscore characters\n 3. apply toLowercase() to both strings\n\n A \"medial hyphen\" is defined as follows (quoted from the above referenced web page):\n\n \"In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the\n normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently\n occur medially as a result of removing whitespace before removing hyphens in a particular implementation of\n matching. Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in\n loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in\n loose matching.\"\n\n\n :param s: String to transform\n :return: String transformed per UAX44-LM2 loose matching rule.\n \"\"\"\n result = s\n\n # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the\n # lookbehind assertion (?<=\\w)) and immediately after (the lookahead assertion (?=\\w)) the hyphen, per the \"medial\n # hyphen\" definition that it is a hyphen occurring immediately between two letters.\n medialhyphen = re.compile(r\"(?<=\\w)-(?=\\w)\")\n whitespaceunderscore = re.compile(r\"[\\s_]\", re.UNICODE)\n\n # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name\n if result != \"HANGUL JUNGSEONG O-E\":\n result = medialhyphen.sub(\"\", result)\n result = whitespaceunderscore.sub(\"\", result)\n return result.lower()\n" ]
class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. 
""" return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. """ try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name)) def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
UnicodeData.lookup_by_name
python
def lookup_by_name(self, name): try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name))
Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L339-L359
[ "def _uax44lm2transform(s):\n \"\"\"\n Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching\n rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>.\n\n The rule is defined as follows:\n\n \"UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in\n U+1180 HANGUL JUNGSEONG O-E.\"\n\n Therefore, correctly implementing the rule involves performing the following three operations, in order:\n\n 1. remove all medial hyphens (except the medial hyphen in the name for U+1180)\n 2. remove all whitespace and underscore characters\n 3. apply toLowercase() to both strings\n\n A \"medial hyphen\" is defined as follows (quoted from the above referenced web page):\n\n \"In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the\n normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently\n occur medially as a result of removing whitespace before removing hyphens in a particular implementation of\n matching. 
Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in\n loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in\n loose matching.\"\n\n\n :param s: String to transform\n :return: String transformed per UAX44-LM2 loose matching rule.\n \"\"\"\n result = s\n\n # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the\n # lookbehind assertion (?<=\\w)) and immediately after (the lookahead assertion (?=\\w)) the hyphen, per the \"medial\n # hyphen\" definition that it is a hyphen occurring immediately between two letters.\n medialhyphen = re.compile(r\"(?<=\\w)-(?=\\w)\")\n whitespaceunderscore = re.compile(r\"[\\s_]\", re.UNICODE)\n\n # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name\n if result != \"HANGUL JUNGSEONG O-E\":\n result = medialhyphen.sub(\"\", result)\n result = whitespaceunderscore.sub(\"\", result)\n return result.lower()\n" ]
class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. 
if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. 
:return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_partial_name(self, partial_name): """ Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter. """ for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
UnicodeData.lookup_by_partial_name
python
def lookup_by_partial_name(self, partial_name): for k, v in self._name_database.items(): if _uax44lm2transform(partial_name) in k: yield v
Similar to lookup_by_name(name), this method uses loose matching rule UAX44-LM2 to attempt to find the UnicodeCharacter associated with a name. However, it attempts to permit even looser matching by doing a substring search instead of a simple match. This method will return a generator that yields instances of UnicodeCharacter where the partial_name passed in is a substring of the full name. For example: >>> ucd = UnicodeData() >>> for data in ucd.lookup_by_partial_name("SHARP S"): >>> print(data.code + " " + data.name) >>> >>> U+00DF LATIN SMALL LETTER SHARP S >>> U+1E9E LATIN CAPITAL LETTER SHARP S >>> U+266F MUSIC SHARP SIGN :param partial_name: Partial name of the character to look up. :return: Generator that yields instances of UnicodeCharacter.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L361-L383
[ "def _uax44lm2transform(s):\n \"\"\"\n Helper function for taking a string (i.e. a Unicode character name) and transforming it via UAX44-LM2 loose matching\n rule. For more information, see <https://www.unicode.org/reports/tr44/#UAX44-LM2>.\n\n The rule is defined as follows:\n\n \"UAX44-LM2. Ignore case, whitespace, underscore ('_'), and all medial hyphens except the hyphen in\n U+1180 HANGUL JUNGSEONG O-E.\"\n\n Therefore, correctly implementing the rule involves performing the following three operations, in order:\n\n 1. remove all medial hyphens (except the medial hyphen in the name for U+1180)\n 2. remove all whitespace and underscore characters\n 3. apply toLowercase() to both strings\n\n A \"medial hyphen\" is defined as follows (quoted from the above referenced web page):\n\n \"In this rule 'medial hyphen' is to be construed as a hyphen occurring immediately between two letters in the\n normative Unicode character name, as published in the Unicode names list, and not to any hyphen that may transiently\n occur medially as a result of removing whitespace before removing hyphens in a particular implementation of\n matching. 
Thus the hyphen in the name U+10089 LINEAR B IDEOGRAM B107M HE-GOAT is medial, and should be ignored in\n loose matching, but the hyphen in the name U+0F39 TIBETAN MARK TSA -PHRU is not medial, and should not be ignored in\n loose matching.\"\n\n\n :param s: String to transform\n :return: String transformed per UAX44-LM2 loose matching rule.\n \"\"\"\n result = s\n\n # For the regex, we are using lookaround assertions to verify that there is a word character immediately before (the\n # lookbehind assertion (?<=\\w)) and immediately after (the lookahead assertion (?=\\w)) the hyphen, per the \"medial\n # hyphen\" definition that it is a hyphen occurring immediately between two letters.\n medialhyphen = re.compile(r\"(?<=\\w)-(?=\\w)\")\n whitespaceunderscore = re.compile(r\"[\\s_]\", re.UNICODE)\n\n # Ok to hard code, this name should never change: https://www.unicode.org/policies/stability_policy.html#Name\n if result != \"HANGUL JUNGSEONG O-E\":\n result = medialhyphen.sub(\"\", result)\n result = whitespaceunderscore.sub(\"\", result)\n return result.lower()\n" ]
class UnicodeData: """Class for encapsulating the data in UnicodeData.txt""" def __init__(self): """Initialize the class by building the Unicode character database.""" self._unicode_character_database = {} self._name_database = {} self._build_unicode_character_database() def _build_unicode_character_database(self): """ Function for parsing the Unicode character data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "UnicodeData.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) tag = re.compile(r"<\w+?>") with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip(): continue data = line.strip().split(";") # Replace the start/end range markers with their proper derived names. if data[1].endswith((u"First>", u"Last>")) and _is_derived(int(data[0], 16)): data[1] = _get_nr_prefix(int(data[0], 16)) if data[1].startswith("HANGUL SYLLABLE"): # For Hangul syllables, use naming rule NR1 data[1] += _get_hangul_syllable_name(int(data[0], 16)) else: # Others should use naming rule NR2 data[1] += data[0] data[3] = int(data[3]) # Convert the Canonical Combining Class value into an int. if data[5]: # Convert the contents of the decomposition into characters, preserving tag info. data[5] = u" ".join([_hexstr_to_unichr(s) if not tag.match(s) else s for s in data[5].split()]) for i in [6, 7, 8]: # Convert the decimal, digit and numeric fields to either ints or fractions. if data[i]: if "/" in data[i]: data[i] = Fraction(data[i]) else: data[i] = int(data[i]) for i in [12, 13, 14]: # Convert the uppercase, lowercase and titlecase fields to characters. 
if data[i]: data[i] = _hexstr_to_unichr(data[i]) lookup_name = _uax44lm2transform(data[1]) uc_data = UnicodeCharacter(u"U+" + data[0], *data[1:]) self._unicode_character_database[int(data[0], 16)] = uc_data self._name_database[lookup_name] = uc_data # Fill out the "compressed" ranges of UnicodeData.txt i.e. fill out the remaining characters per the Name # Derivation Rules. See the Unicode Standard, ch. 4, section 4.8, Unicode Name Property for lookup_range, prefix_string in _nr_prefix_strings.items(): exemplar = self._unicode_character_database.__getitem__(lookup_range[0]) for item in lookup_range: hex_code = _padded_hex(item) new_name = prefix_string if prefix_string.startswith("HANGUL SYLLABLE"): # For Hangul, use naming rule NR1 new_name += _get_hangul_syllable_name(item) else: # Everything else uses naming rule NR2 new_name += hex_code uc_data = exemplar._replace(code=u"U+" + hex_code, name=new_name) self._unicode_character_database[item] = uc_data self._name_database[_uax44lm2transform(new_name)] = uc_data def get(self, value): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: UnicodeCharacter instance with data associated with the specified value. """ return self._unicode_character_database.__getitem__(item) def __iter__(self): """Function for iterating through the keys of the data.""" return self._unicode_character_database.__iter__() def __len__(self): """Function for returning the size of the data.""" return self._unicode_character_database.__len__() def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. 
:return: list of (key, value) pairs, as tuples. """ return self._unicode_character_database.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_character_database.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_character_database.values() def lookup_by_char(self, c): """ Function for retrieving the UnicodeCharacter associated with the specified Unicode character. :param c: Unicode character to look up. :return: UnicodeCharacter instance with data associated with the specified Unicode character. """ return self._unicode_character_database[_to_unicode_scalar_value(c)] def lookup_by_name(self, name): """ Function for retrieving the UnicodeCharacter associated with a name. The name lookup uses the loose matching rule UAX44-LM2 for loose matching. See the following for more info: https://www.unicode.org/reports/tr44/#UAX44-LM2 For example: ucd = UnicodeData() ucd.lookup_by_name("LATIN SMALL LETTER SHARP S") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) ucd.lookup_by_name("latin_small_letter_sharp_s") -> UnicodeCharacter(name='LATIN SMALL LETTER SHARP S',...) :param name: Name of the character to look up. :return: UnicodeCharacter instance with data associated with the character. """ try: return self._name_database[_uax44lm2transform(name)] except KeyError: raise KeyError(u"Unknown character name: '{0}'!".format(name))
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
UnicodeBlocks._load_unicode_block_info
python
def _load_unicode_block_info(self): filename = "Blocks.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') # Format: Start Code..End Code; Block Name block_range, block_name = line.strip().split(";") start_range, end_range = block_range.strip().split("..") self._unicode_blocks[six.moves.range(int(start_range, 16), int(end_range, 16) + 1)] = block_name.strip()
Function for parsing the Unicode block info from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L394-L409
null
class UnicodeBlocks: """Class for encapsulating the data in Blocks.txt""" def __init__(self): """Initialize the class by loading the Unicode block info.""" self._unicode_blocks = OrderedDict() self._load_unicode_block_info() def get(self, value): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param value: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ return self.__getitem__(value) def __getitem__(self, item): """ Function for retrieving the Unicode Block name associated with the specified Unicode scalar value. :param item: Unicode scalar value to look up. :return: Unicode Block name associated with the specified value. """ for block_range, name in self._unicode_blocks.items(): if item in block_range: return name return u"No_Block" def items(self): """ Returns a list of the data's (key, value) pairs, as tuples. :return: list of (key, value) pairs, as tuples. """ return self._unicode_blocks.items() def keys(self): """ Returns a list of the data's keys. :return: list of the data's keys """ return self._unicode_blocks.keys() def values(self): """ Returns a list of the data's values. :return: list of the data's values. """ return self._unicode_blocks.values() def lookup_by_char(self, c): """ Function for retrieving the Unicode Block name associated with the specified Unicode character. :param c: Unicode character to look up. :return: Unicode Block name associated with the specified Unicode character. """ return self.__getitem__(_to_unicode_scalar_value(c))
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
CaseFoldingMap._build_casefold_map
python
def _build_casefold_map(self): self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target
Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L473-L489
null
class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def lookup(self, c, lookup_order="CF"): """ Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). :param c: character to lookup :param lookup_order: """ if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c
leonidessaguisagjr/unicodeutil
unicodeutil/unicodeutil.py
CaseFoldingMap.lookup
python
def lookup(self, c, lookup_order="CF"): if not isinstance(c, six.text_type): raise TypeError(u"Character to lookup must be of type 'unicode'!") for d in lookup_order: try: return self._casefold_map[d][c] except KeyError: pass return c
Function to lookup a character in the casefold map. The casefold map has four sub-tables, the 'C' or common table, the 'F' or full table, the 'S' or simple table and the 'T' or the Turkic special case table. These tables correspond to the statuses defined in the CaseFolding.txt file. We can specify the order of the tables to use for performing the lookup by the lookup_order parameter. Per the usage specified in the CaseFolding.txt file, we can use the 'C' and 'S' tables for doing a simple case fold. To perform a full case fold, we can use the 'C' and 'F' tables. The default behavior for this function is a full case fold (lookup_order="CF"). :param c: character to lookup :param lookup_order:
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/unicodeutil.py#L491-L516
null
class CaseFoldingMap: """Class for performing Unicode case folding.""" def __init__(self): """Initialize the class by building the casefold map.""" self._build_casefold_map() def _build_casefold_map(self): """ Function for parsing the case folding data from the Unicode Character Database (UCD) and generating a lookup table. For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ self._casefold_map = defaultdict(dict) filename = "CaseFolding.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') code, status, mapping, name = line.split(";") src = _hexstr_to_unichr(code) target = u"".join([_hexstr_to_unichr(c) for c in mapping.strip().split()]) self._casefold_map[status.strip()][src] = target
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
_load_hangul_syllable_types
python
def _load_hangul_syllable_types(): filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type
Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L10-L29
null
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_jamo_short_names(): """ Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf """ filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. 
For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable) if not _hangul_syllable_types: _load_hangul_syllable_types() return _hangul_syllable_types[hangul_syllable] def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo) if not _jamo_short_names: _load_jamo_short_names() return _jamo_short_names[jamo] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. 
Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """ fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!" if len(jamo) == 3: l_part, v_part, t_part = jamo if not (l_part in range(0x1100, 0x1112 + 1) and v_part in range(0x1161, 0x1175 + 1) and t_part in range(0x11a8, 0x11c2 + 1)): raise ValueError(fmt_str_invalid_sequence.format(jamo)) l_index = l_part - L_BASE v_index = v_part - V_BASE t_index = t_part - T_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index + t_index elif len(jamo) == 2: if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1): l_part, v_part = jamo l_index = l_part - L_BASE v_index = v_part - V_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1): lv_part, t_part = jamo t_index = t_part - T_BASE return lv_part + t_index else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): """ Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of 
the decomposed (canonical decomposition) Jamo. If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. 
:param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
_load_jamo_short_names
python
def _load_jamo_short_names(): filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name
Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L32-L51
null
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_hangul_syllable_types(): """ Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. 
""" if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable) if not _hangul_syllable_types: _load_hangul_syllable_types() return _hangul_syllable_types[hangul_syllable] def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo) if not _jamo_short_names: _load_jamo_short_names() return _jamo_short_names[jamo] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. 
Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """ fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!" 
if len(jamo) == 3: l_part, v_part, t_part = jamo if not (l_part in range(0x1100, 0x1112 + 1) and v_part in range(0x1161, 0x1175 + 1) and t_part in range(0x11a8, 0x11c2 + 1)): raise ValueError(fmt_str_invalid_sequence.format(jamo)) l_index = l_part - L_BASE v_index = v_part - V_BASE t_index = t_part - T_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index + t_index elif len(jamo) == 2: if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1): l_part, v_part = jamo l_index = l_part - L_BASE v_index = v_part - V_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1): lv_part, t_part = jamo t_index = t_part - T_BASE return lv_part + t_index else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): """ Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of the decomposed (canonical decomposition) Jamo. If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo. 
""" if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
_get_hangul_syllable_type
python
def _get_hangul_syllable_type(hangul_syllable):
    """
    Return the Hangul_Syllable_Type property value ("L", "V", "T", "LV" or
    "LVT") for the given Hangul syllable.

    For more information on the Hangul_Syllable_Type property see the Unicode
    Standard, ch. 03, section 3.12, Conjoining Jamo Behavior:
    https://www.unicode.org/versions/latest/ch03.pdf

    :param hangul_syllable: Unicode scalar value representing a Hangul syllable
    :return: String with the syllable's Hangul_Syllable_Type property value
    :raises ValueError: if the value is outside the Hangul syllable range
    """
    if _is_hangul_syllable(hangul_syllable):
        # Build the lookup table lazily on first use.
        if not _hangul_syllable_types:
            _load_hangul_syllable_types()
        return _hangul_syllable_types[hangul_syllable]
    raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable)
Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT")
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L78-L93
[ "def _load_hangul_syllable_types():\n \"\"\"\n Helper function for parsing the contents of \"HangulSyllableType.txt\" from the Unicode Character Database (UCD) and\n generating a lookup table for determining whether or not a given Hangul syllable is of type \"L\", \"V\", \"T\", \"LV\" or\n \"LVT\". For more info on the UCD, see the following website: https://www.unicode.org/ucd/\n \"\"\"\n filename = \"HangulSyllableType.txt\"\n current_dir = os.path.abspath(os.path.dirname(__file__))\n with codecs.open(os.path.join(current_dir, filename), mode=\"r\", encoding=\"utf-8\") as fp:\n for line in fp:\n if not line.strip() or line.startswith(\"#\"):\n continue # Skip empty lines or lines that are comments (comments start with '#')\n data = line.strip().split(\";\")\n syllable_type, _ = map(six.text_type.strip, data[1].split(\"#\"))\n if \"..\" in data[0]: # If it is a range and not a single value\n start, end = map(lambda x: int(x, 16), data[0].strip().split(\"..\"))\n for idx in range(start, end + 1):\n _hangul_syllable_types[idx] = syllable_type\n else:\n _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type\n", "def _is_hangul_syllable(i):\n \"\"\"\n Function for determining if a Unicode scalar value i is within the range of Hangul syllables.\n\n :param i: Unicode scalar value to lookup\n :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False.\n \"\"\"\n if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt\n return True\n return False\n" ]
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_hangul_syllable_types(): """ Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type def _load_jamo_short_names(): """ Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". 
https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf """ filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" 
% jamo) if not _jamo_short_names: _load_jamo_short_names() return _jamo_short_names[jamo] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """ fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!" 
if len(jamo) == 3: l_part, v_part, t_part = jamo if not (l_part in range(0x1100, 0x1112 + 1) and v_part in range(0x1161, 0x1175 + 1) and t_part in range(0x11a8, 0x11c2 + 1)): raise ValueError(fmt_str_invalid_sequence.format(jamo)) l_index = l_part - L_BASE v_index = v_part - V_BASE t_index = t_part - T_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index + t_index elif len(jamo) == 2: if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1): l_part, v_part = jamo l_index = l_part - L_BASE v_index = v_part - V_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1): lv_part, t_part = jamo t_index = t_part - T_BASE return lv_part + t_index else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): """ Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of the decomposed (canonical decomposition) Jamo. If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo. 
""" if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
_get_jamo_short_name
python
def _get_jamo_short_name(jamo):
    """
    Return the Jamo_Short_Name property value for the given Jamo.

    For more information on the Jamo_Short_Name property see the Unicode
    Standard, ch. 03, section 3.12, Conjoining Jamo Behavior:
    https://www.unicode.org/versions/latest/ch03.pdf

    :param jamo: Unicode scalar value representing a Jamo
    :return: String with the Jamo's Jamo_Short_Name property value
    :raises ValueError: if the value is outside the Jamo range
    """
    if _is_jamo(jamo):
        # Build the lookup table lazily on first use.
        if not _jamo_short_names:
            _load_jamo_short_names()
        return _jamo_short_names[jamo]
    raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo)
Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L96-L111
null
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_hangul_syllable_types(): """ Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type def _load_jamo_short_names(): """ Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". 
https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf """ filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. 
https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable) if not _hangul_syllable_types: _load_hangul_syllable_types() return _hangul_syllable_types[hangul_syllable] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """ fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!" 
if len(jamo) == 3: l_part, v_part, t_part = jamo if not (l_part in range(0x1100, 0x1112 + 1) and v_part in range(0x1161, 0x1175 + 1) and t_part in range(0x11a8, 0x11c2 + 1)): raise ValueError(fmt_str_invalid_sequence.format(jamo)) l_index = l_part - L_BASE v_index = v_part - V_BASE t_index = t_part - T_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index + t_index elif len(jamo) == 2: if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1): l_part, v_part = jamo l_index = l_part - L_BASE v_index = v_part - V_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1): lv_part, t_part = jamo t_index = t_part - T_BASE return lv_part + t_index else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): """ Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of the decomposed (canonical decomposition) Jamo. If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo. 
""" if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
compose_hangul_syllable
python
def compose_hangul_syllable(jamo):
    """
    Compose a tuple or list of Unicode scalar values representing Jamo into a
    single Hangul syllable. If the values passed in do not form a composable
    Jamo sequence, a ValueError is raised. The composition algorithm is
    described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo
    Behavior."

    Example:

    (U+1111, U+1171) -> U+D4CC
    (U+D4CC, U+11B6) -> U+D4DB
    (U+1111, U+1171, U+11B6) -> U+D4DB

    :param jamo: Tuple or list of Jamo to compose
    :return: Unicode scalar value of the composed Hangul syllable
    :raises ValueError: if the sequence cannot be composed
    """
    fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!"
    leading_range = range(0x1100, 0x1112 + 1)   # Hangul Choseong (L)
    vowel_range = range(0x1161, 0x1175 + 1)     # Hangul Jungseong (V)
    trailing_range = range(0x11a8, 0x11c2 + 1)  # Hangul Jongseong (T)
    if len(jamo) == 3:
        l_part, v_part, t_part = jamo
        valid = (l_part in leading_range
                 and v_part in vowel_range
                 and t_part in trailing_range)
        if not valid:
            raise ValueError(fmt_str_invalid_sequence.format(jamo))
        # <L, V, T> composes directly to an LVT syllable.
        lv_index = (l_part - L_BASE) * N_COUNT + (v_part - V_BASE) * T_COUNT
        return S_BASE + lv_index + (t_part - T_BASE)
    if len(jamo) == 2:
        first, second = jamo
        if first in leading_range and second in vowel_range:
            # <L, V> composes to an LV syllable.
            lv_index = (first - L_BASE) * N_COUNT + (second - V_BASE) * T_COUNT
            return S_BASE + lv_index
        if _get_hangul_syllable_type(first) == "LV" and second in trailing_range:
            # <LV syllable, T> composes to an LVT syllable.
            return first + (second - T_BASE)
    raise ValueError(fmt_str_invalid_sequence.format(jamo))
Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple or list of Jamo to compose :return: Composed Hangul syllable
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L126-L167
[ "def _get_hangul_syllable_type(hangul_syllable):\n \"\"\"\n Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its\n Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard,\n ch. 03, section 3.12, Conjoining Jamo Behavior.\n\n https://www.unicode.org/versions/latest/ch03.pdf\n\n :param hangul_syllable: Unicode scalar value representing a Hangul syllable\n :return: Returns a string representing its Hangul_Syllable_Type property (\"L\", \"V\", \"T\", \"LV\" or \"LVT\")\n \"\"\"\n if not _is_hangul_syllable(hangul_syllable):\n raise ValueError(\"Value 0x%0.4x does not represent a Hangul syllable!\" % hangul_syllable)\n if not _hangul_syllable_types:\n _load_hangul_syllable_types()\n return _hangul_syllable_types[hangul_syllable]\n" ]
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_hangul_syllable_types(): """ Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type def _load_jamo_short_names(): """ Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". 
https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf """ filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. 
https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable) if not _hangul_syllable_types: _load_hangul_syllable_types() return _hangul_syllable_types[hangul_syllable] def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo) if not _jamo_short_names: _load_jamo_short_names() return _jamo_short_names[jamo] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): """ Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of the decomposed (canonical decomposition) Jamo. 
If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. 
:param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
decompose_hangul_syllable
python
def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part
Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of the decomposed (canonical decomposition) Jamo. If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L170-L212
[ "def _is_hangul_syllable(i):\n \"\"\"\n Function for determining if a Unicode scalar value i is within the range of Hangul syllables.\n\n :param i: Unicode scalar value to lookup\n :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False.\n \"\"\"\n if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt\n return True\n return False\n", "def _get_hangul_syllable_type(hangul_syllable):\n \"\"\"\n Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its\n Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard,\n ch. 03, section 3.12, Conjoining Jamo Behavior.\n\n https://www.unicode.org/versions/latest/ch03.pdf\n\n :param hangul_syllable: Unicode scalar value representing a Hangul syllable\n :return: Returns a string representing its Hangul_Syllable_Type property (\"L\", \"V\", \"T\", \"LV\" or \"LVT\")\n \"\"\"\n if not _is_hangul_syllable(hangul_syllable):\n raise ValueError(\"Value 0x%0.4x does not represent a Hangul syllable!\" % hangul_syllable)\n if not _hangul_syllable_types:\n _load_hangul_syllable_types()\n return _hangul_syllable_types[hangul_syllable]\n" ]
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_hangul_syllable_types(): """ Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type def _load_jamo_short_names(): """ Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". 
https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf """ filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. 
https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable) if not _hangul_syllable_types: _load_hangul_syllable_types() return _hangul_syllable_types[hangul_syllable] def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo) if not _jamo_short_names: _load_jamo_short_names() return _jamo_short_names[jamo] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. 
If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """ fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!" if len(jamo) == 3: l_part, v_part, t_part = jamo if not (l_part in range(0x1100, 0x1112 + 1) and v_part in range(0x1161, 0x1175 + 1) and t_part in range(0x11a8, 0x11c2 + 1)): raise ValueError(fmt_str_invalid_sequence.format(jamo)) l_index = l_part - L_BASE v_index = v_part - V_BASE t_index = t_part - T_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index + t_index elif len(jamo) == 2: if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1): l_part, v_part = jamo l_index = l_part - L_BASE v_index = v_part - V_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1): lv_part, t_part = jamo t_index = t_part - T_BASE return lv_part + t_index else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) def _get_hangul_syllable_name(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1. 
""" if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
leonidessaguisagjr/unicodeutil
unicodeutil/hangulutil.py
_get_hangul_syllable_name
python
def _get_hangul_syllable_name(hangul_syllable): if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") jamo = decompose_hangul_syllable(hangul_syllable, fully_decompose=True) result = '' for j in jamo: if j is not None: result += _get_jamo_short_name(j) return result
Function for taking a Unicode scalar value representing a Hangul syllable and converting it to its syllable name as defined by the Unicode naming rule NR1. See the Unicode Standard, ch. 04, section 4.8, Names, for more information. :param hangul_syllable: Unicode scalar value representing the Hangul syllable to convert :return: String representing its syllable name as transformed according to naming rule NR1.
train
https://github.com/leonidessaguisagjr/unicodeutil/blob/c25c882cf9cb38c123df49fad365be67e5818928/unicodeutil/hangulutil.py#L215-L230
[ "def decompose_hangul_syllable(hangul_syllable, fully_decompose=False):\n \"\"\"\n Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple\n representing the scalar values of the decomposed (canonical decomposition) Jamo. If the Unicode scalar value\n passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be\n raised.\n\n The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12,\n \"Conjoining Jamo Behavior\".\n\n Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default)\n U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition)\n\n :param hangul_syllable: Unicode scalar value for Hangul syllable\n :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is\n fully_decompose=False) or a full canonical decomposition (fully_decompose=True)\n :return: Tuple of Unicode scalar values for the decomposed Jamo.\n \"\"\"\n if not _is_hangul_syllable(hangul_syllable):\n raise ValueError(\"Value passed in does not represent a Hangul syllable!\")\n s_index = hangul_syllable - S_BASE\n\n if fully_decompose:\n l_index = s_index // N_COUNT\n v_index = (s_index % N_COUNT) // T_COUNT\n t_index = s_index % T_COUNT\n l_part = L_BASE + l_index\n v_part = V_BASE + v_index\n t_part = (T_BASE + t_index) if t_index > 0 else None\n return l_part, v_part, t_part\n else:\n if _get_hangul_syllable_type(hangul_syllable) == \"LV\": # Hangul_Syllable_Type = LV\n l_index = s_index // N_COUNT\n v_index = (s_index % N_COUNT) // T_COUNT\n l_part = L_BASE + l_index\n v_part = V_BASE + v_index\n return l_part, v_part\n else: # Assume Hangul_Syllable_Type = LVT\n lv_index = (s_index // T_COUNT) * T_COUNT\n t_index = s_index % T_COUNT\n lv_part = S_BASE + lv_index\n t_part = T_BASE + t_index\n return lv_part, t_part\n", "def _is_hangul_syllable(i):\n \"\"\"\n Function for 
determining if a Unicode scalar value i is within the range of Hangul syllables.\n\n :param i: Unicode scalar value to lookup\n :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False.\n \"\"\"\n if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt\n return True\n return False\n", "def _get_jamo_short_name(jamo):\n \"\"\"\n Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its\n Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard,\n ch. 03, section 3.12, Conjoining Jamo Behavior.\n\n https://www.unicode.org/versions/latest/ch03.pdf\n\n :param jamo: Unicode scalar value representing a Jamo\n :return: Returns a string representing its Jamo_Short_Name property\n \"\"\"\n if not _is_jamo(jamo):\n raise ValueError(\"Value 0x%0.4x passed in does not represent a Jamo!\" % jamo)\n if not _jamo_short_names:\n _load_jamo_short_names()\n return _jamo_short_names[jamo]\n" ]
import codecs import os import six _hangul_syllable_types = {} _jamo_short_names = {} def _load_hangul_syllable_types(): """ Helper function for parsing the contents of "HangulSyllableType.txt" from the Unicode Character Database (UCD) and generating a lookup table for determining whether or not a given Hangul syllable is of type "L", "V", "T", "LV" or "LVT". For more info on the UCD, see the following website: https://www.unicode.org/ucd/ """ filename = "HangulSyllableType.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") syllable_type, _ = map(six.text_type.strip, data[1].split("#")) if ".." in data[0]: # If it is a range and not a single value start, end = map(lambda x: int(x, 16), data[0].strip().split("..")) for idx in range(start, end + 1): _hangul_syllable_types[idx] = syllable_type else: _hangul_syllable_types[int(data[0].strip(), 16)] = syllable_type def _load_jamo_short_names(): """ Function for parsing the Jamo short names from the Unicode Character Database (UCD) and generating a lookup table For more info on how this is used, see the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior" and ch. 04, section 4.8, "Name". 
https://www.unicode.org/versions/latest/ch03.pdf https://www.unicode.org/versions/latest/ch04.pdf """ filename = "Jamo.txt" current_dir = os.path.abspath(os.path.dirname(__file__)) with codecs.open(os.path.join(current_dir, filename), mode="r", encoding="utf-8") as fp: for line in fp: if not line.strip() or line.startswith("#"): continue # Skip empty lines or lines that are comments (comments start with '#') data = line.strip().split(";") code = int(data[0].strip(), 16) char_info = data[1].split("#") short_name = char_info[0].strip() _jamo_short_names[code] = short_name def _is_hangul_syllable(i): """ Function for determining if a Unicode scalar value i is within the range of Hangul syllables. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0xAC00, 0xD7A3 + 1): # Range of Hangul characters as defined in UnicodeData.txt return True return False def _is_jamo(i): """ Function for determining if a Unicode scalar value i is within the range of Jamo. :param i: Unicode scalar value to lookup :return: Boolean: True if the lookup value is within the range of Hangul syllables, otherwise False. """ if i in range(0x1100, 0x11ff + 1): # Range of Jamo as defined in Blocks.txt, "1100..11FF; Hangul Jamo" return True return False def _get_hangul_syllable_type(hangul_syllable): """ Function for taking a Unicode scalar value representing a Hangul syllable and determining the correct value for its Hangul_Syllable_Type property. For more information on the Hangul_Syllable_Type property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. 
https://www.unicode.org/versions/latest/ch03.pdf :param hangul_syllable: Unicode scalar value representing a Hangul syllable :return: Returns a string representing its Hangul_Syllable_Type property ("L", "V", "T", "LV" or "LVT") """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value 0x%0.4x does not represent a Hangul syllable!" % hangul_syllable) if not _hangul_syllable_types: _load_hangul_syllable_types() return _hangul_syllable_types[hangul_syllable] def _get_jamo_short_name(jamo): """ Function for taking a Unicode scalar value representing a Jamo and determining the correct value for its Jamo_Short_Name property. For more information on the Jamo_Short_Name property see the Unicode Standard, ch. 03, section 3.12, Conjoining Jamo Behavior. https://www.unicode.org/versions/latest/ch03.pdf :param jamo: Unicode scalar value representing a Jamo :return: Returns a string representing its Jamo_Short_Name property """ if not _is_jamo(jamo): raise ValueError("Value 0x%0.4x passed in does not represent a Jamo!" % jamo) if not _jamo_short_names: _load_jamo_short_names() return _jamo_short_names[jamo] #: Common constants for decomposing and composing Hangul syllables S_BASE = 0xAC00 # U+AC00, start of Hangul syllable range L_BASE = 0x1100 # U+1100, start of Hangul leading consonant / syllable-initial range i.e. Hangul Choseong V_BASE = 0x1161 # U+1161, start of Hangul vowel / syllable-peak range i.e Hangul Jungseong T_BASE = 0x11a7 # U+11A7, start of Hangul trailing consonant / syllable-final range i.e. Hangul Jongseong L_COUNT = 19 # Count of Hangul Choseong V_COUNT = 21 # Count of Hangul Jungseong T_COUNT = 28 # Count of Hangul Jongseong + 1 N_COUNT = V_COUNT * T_COUNT S_COUNT = L_COUNT * N_COUNT def compose_hangul_syllable(jamo): """ Function for taking a tuple or list of Unicode scalar values representing Jamo and composing it into a Hangul syllable. 
If the values in the list or tuple passed in are not in the ranges of Jamo, a ValueError will be raised. The algorithm for doing the composition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior." Example: (U+1111, U+1171) -> U+D4CC (U+D4CC, U+11B6) -> U+D4DB (U+1111, U+1171, U+11B6) -> U+D4DB :param jamo: Tuple of list of Jamo to compose :return: Composed Hangul syllable """ fmt_str_invalid_sequence = "{0} does not represent a valid sequence of Jamo!" if len(jamo) == 3: l_part, v_part, t_part = jamo if not (l_part in range(0x1100, 0x1112 + 1) and v_part in range(0x1161, 0x1175 + 1) and t_part in range(0x11a8, 0x11c2 + 1)): raise ValueError(fmt_str_invalid_sequence.format(jamo)) l_index = l_part - L_BASE v_index = v_part - V_BASE t_index = t_part - T_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index + t_index elif len(jamo) == 2: if jamo[0] in range(0x1100, 0x1112 + 1) and jamo[1] in range(0x1161, 0x1175 + 1): l_part, v_part = jamo l_index = l_part - L_BASE v_index = v_part - V_BASE lv_index = l_index * N_COUNT + v_index * T_COUNT return S_BASE + lv_index elif _get_hangul_syllable_type(jamo[0]) == "LV" and jamo[1] in range(0x11a8, 0x11c2 + 1): lv_part, t_part = jamo t_index = t_part - T_BASE return lv_part + t_index else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) else: raise ValueError(fmt_str_invalid_sequence.format(jamo)) def decompose_hangul_syllable(hangul_syllable, fully_decompose=False): """ Function for taking a Unicode scalar value representing a Hangul syllable and decomposing it into a tuple representing the scalar values of the decomposed (canonical decomposition) Jamo. If the Unicode scalar value passed in is not in the range of Hangul syllable values (as defined in UnicodeData.txt), a ValueError will be raised. The algorithm for doing the decomposition is described in the Unicode Standard, ch. 03, section 3.12, "Conjoining Jamo Behavior". 
Example: U+D4DB -> (U+D4CC, U+11B6) # (canonical decomposition, default) U+D4DB -> (U+1111, U+1171, U+11B6) # (full canonical decomposition) :param hangul_syllable: Unicode scalar value for Hangul syllable :param fully_decompose: Boolean indicating whether or not to do a canonical decomposition (default behavior is fully_decompose=False) or a full canonical decomposition (fully_decompose=True) :return: Tuple of Unicode scalar values for the decomposed Jamo. """ if not _is_hangul_syllable(hangul_syllable): raise ValueError("Value passed in does not represent a Hangul syllable!") s_index = hangul_syllable - S_BASE if fully_decompose: l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT t_index = s_index % T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index t_part = (T_BASE + t_index) if t_index > 0 else None return l_part, v_part, t_part else: if _get_hangul_syllable_type(hangul_syllable) == "LV": # Hangul_Syllable_Type = LV l_index = s_index // N_COUNT v_index = (s_index % N_COUNT) // T_COUNT l_part = L_BASE + l_index v_part = V_BASE + v_index return l_part, v_part else: # Assume Hangul_Syllable_Type = LVT lv_index = (s_index // T_COUNT) * T_COUNT t_index = s_index % T_COUNT lv_part = S_BASE + lv_index t_part = T_BASE + t_index return lv_part, t_part
gpoulter/python-ngram
scripts/csvjoin.py
lowstrip
python
def lowstrip(term): term = re.sub('\s+', ' ', term) term = term.lower() return term
Convert to lowercase and strip spaces
train
https://github.com/gpoulter/python-ngram/blob/f8543bdc84a4d24ac60a48b36c4034f881664491/scripts/csvjoin.py#L16-L20
null
#!/usr/bin/python """ Left similarity join between two CSV files. For each row in the first file, take the specified join column and find similar rows in the second file based on ngram similarity to a specified column in the second file. For each resulting pair of rows, output a row consisting of the fields from the first file, a column with the similarity value, and then the fields from the second file. """ from __future__ import print_function import csv, os, re from ngram import NGram def main(left_path, left_column, right_path, right_column, outfile, titles, join, minscore, count, warp): """Perform the similarity join""" right_file = csv.reader(open(right_path, 'r')) if titles: right_header = next(right_file) index = NGram((tuple(r) for r in right_file), threshold=minscore, warp=warp, key=lambda x: lowstrip(x[right_column])) left_file = csv.reader(open(left_path, 'r')) out = csv.writer(open(outfile, 'w'), lineterminator='\n') if titles: left_header = next(left_file) out.writerow(left_header + ["Rank", "Similarity"] + right_header) for row in left_file: if not row: continue # skip blank lines row = tuple(row) results = index.search(lowstrip(row[left_column]), threshold=minscore) if results: if count > 0: results = results[:count] for rank, result in enumerate(results, 1): out.writerow(row + (rank, result[1]) + result[0]) elif join == "outer": out.writerow(row) def console_main(): """Process command-line arguments.""" from argparse import ArgumentParser parser = ArgumentParser(description=__doc__) parser.add_argument('-t', '--titles', action='store_true', help='input files have column titles') parser.add_argument( '-j', '--join', choices=['inner', 'outer'], help=('The kind of left join to perform. Outer join outputs left-hand ' 'rows which have no right hand match, while inner join discards ' 'such rows. 
Default: %(default)s')) parser.add_argument('-m', '--minscore', type=float, help='Minimum match score: %(default)s') parser.add_argument('-c', '--count', type=int, help='Max number of rows to match (0 for all): %(default)s') parser.add_argument('-w', '--warp', type=float, help='N-gram warp, higher helps short strings: %(default)s') parser.add_argument('left', nargs=1, help='First CSV file') parser.add_argument('leftcolumn', nargs=1, type=int, help='Column in first CSV file') parser.add_argument('right', nargs=1, help='Second CSV file') parser.add_argument('rightcolumn', nargs=1, type=int, help='Column in second CSV file') parser.add_argument('outfile', nargs=1, help='Output CSV file') parser.set_defaults( titles=False, join='outer', minscore=0.24, count=0, warp=1.0) args = parser.parse_args() for path in [args.left[0], args.right[0]]: if not os.path.isfile(path): parser.error('File "%s" does not exist.' % path) if not (0 <= args.minscore <= 1.0): parser.error("Minimum score must be between 0 and 1") if not args.count >= 0: parser.error("Maximum number of matches per row must be non-negative.") if args.count == 0: args.count = None # to return all results main(args.left[0], args.leftcolumn[0], args.right[0], args.rightcolumn[0], args.outfile[0], args.titles, args.join, args.minscore, args.count, args.warp) if __name__ == '__main__': console_main()
gpoulter/python-ngram
scripts/csvjoin.py
main
python
def main(left_path, left_column, right_path, right_column, outfile, titles, join, minscore, count, warp): right_file = csv.reader(open(right_path, 'r')) if titles: right_header = next(right_file) index = NGram((tuple(r) for r in right_file), threshold=minscore, warp=warp, key=lambda x: lowstrip(x[right_column])) left_file = csv.reader(open(left_path, 'r')) out = csv.writer(open(outfile, 'w'), lineterminator='\n') if titles: left_header = next(left_file) out.writerow(left_header + ["Rank", "Similarity"] + right_header) for row in left_file: if not row: continue # skip blank lines row = tuple(row) results = index.search(lowstrip(row[left_column]), threshold=minscore) if results: if count > 0: results = results[:count] for rank, result in enumerate(results, 1): out.writerow(row + (rank, result[1]) + result[0]) elif join == "outer": out.writerow(row)
Perform the similarity join
train
https://github.com/gpoulter/python-ngram/blob/f8543bdc84a4d24ac60a48b36c4034f881664491/scripts/csvjoin.py#L22-L46
[ "def lowstrip(term):\n \"\"\"Convert to lowercase and strip spaces\"\"\"\n term = re.sub('\\s+', ' ', term)\n term = term.lower()\n return term\n" ]
#!/usr/bin/python """ Left similarity join between two CSV files. For each row in the first file, take the specified join column and find similar rows in the second file based on ngram similarity to a specified column in the second file. For each resulting pair of rows, output a row consisting of the fields from the first file, a column with the similarity value, and then the fields from the second file. """ from __future__ import print_function import csv, os, re from ngram import NGram def lowstrip(term): """Convert to lowercase and strip spaces""" term = re.sub('\s+', ' ', term) term = term.lower() return term def console_main(): """Process command-line arguments.""" from argparse import ArgumentParser parser = ArgumentParser(description=__doc__) parser.add_argument('-t', '--titles', action='store_true', help='input files have column titles') parser.add_argument( '-j', '--join', choices=['inner', 'outer'], help=('The kind of left join to perform. Outer join outputs left-hand ' 'rows which have no right hand match, while inner join discards ' 'such rows. Default: %(default)s')) parser.add_argument('-m', '--minscore', type=float, help='Minimum match score: %(default)s') parser.add_argument('-c', '--count', type=int, help='Max number of rows to match (0 for all): %(default)s') parser.add_argument('-w', '--warp', type=float, help='N-gram warp, higher helps short strings: %(default)s') parser.add_argument('left', nargs=1, help='First CSV file') parser.add_argument('leftcolumn', nargs=1, type=int, help='Column in first CSV file') parser.add_argument('right', nargs=1, help='Second CSV file') parser.add_argument('rightcolumn', nargs=1, type=int, help='Column in second CSV file') parser.add_argument('outfile', nargs=1, help='Output CSV file') parser.set_defaults( titles=False, join='outer', minscore=0.24, count=0, warp=1.0) args = parser.parse_args() for path in [args.left[0], args.right[0]]: if not os.path.isfile(path): parser.error('File "%s" does not exist.' 
% path) if not (0 <= args.minscore <= 1.0): parser.error("Minimum score must be between 0 and 1") if not args.count >= 0: parser.error("Maximum number of matches per row must be non-negative.") if args.count == 0: args.count = None # to return all results main(args.left[0], args.leftcolumn[0], args.right[0], args.rightcolumn[0], args.outfile[0], args.titles, args.join, args.minscore, args.count, args.warp) if __name__ == '__main__': console_main()
gpoulter/python-ngram
scripts/csvjoin.py
console_main
python
def console_main(): from argparse import ArgumentParser parser = ArgumentParser(description=__doc__) parser.add_argument('-t', '--titles', action='store_true', help='input files have column titles') parser.add_argument( '-j', '--join', choices=['inner', 'outer'], help=('The kind of left join to perform. Outer join outputs left-hand ' 'rows which have no right hand match, while inner join discards ' 'such rows. Default: %(default)s')) parser.add_argument('-m', '--minscore', type=float, help='Minimum match score: %(default)s') parser.add_argument('-c', '--count', type=int, help='Max number of rows to match (0 for all): %(default)s') parser.add_argument('-w', '--warp', type=float, help='N-gram warp, higher helps short strings: %(default)s') parser.add_argument('left', nargs=1, help='First CSV file') parser.add_argument('leftcolumn', nargs=1, type=int, help='Column in first CSV file') parser.add_argument('right', nargs=1, help='Second CSV file') parser.add_argument('rightcolumn', nargs=1, type=int, help='Column in second CSV file') parser.add_argument('outfile', nargs=1, help='Output CSV file') parser.set_defaults( titles=False, join='outer', minscore=0.24, count=0, warp=1.0) args = parser.parse_args() for path in [args.left[0], args.right[0]]: if not os.path.isfile(path): parser.error('File "%s" does not exist.' % path) if not (0 <= args.minscore <= 1.0): parser.error("Minimum score must be between 0 and 1") if not args.count >= 0: parser.error("Maximum number of matches per row must be non-negative.") if args.count == 0: args.count = None # to return all results main(args.left[0], args.leftcolumn[0], args.right[0], args.rightcolumn[0], args.outfile[0], args.titles, args.join, args.minscore, args.count, args.warp)
Process command-line arguments.
train
https://github.com/gpoulter/python-ngram/blob/f8543bdc84a4d24ac60a48b36c4034f881664491/scripts/csvjoin.py#L48-L84
[ "def main(left_path, left_column, right_path, right_column,\n outfile, titles, join, minscore, count, warp):\n \"\"\"Perform the similarity join\"\"\"\n right_file = csv.reader(open(right_path, 'r'))\n if titles:\n right_header = next(right_file)\n index = NGram((tuple(r) for r in right_file),\n threshold=minscore,\n warp=warp, key=lambda x: lowstrip(x[right_column]))\n left_file = csv.reader(open(left_path, 'r'))\n out = csv.writer(open(outfile, 'w'), lineterminator='\\n')\n if titles:\n left_header = next(left_file)\n out.writerow(left_header + [\"Rank\", \"Similarity\"] + right_header)\n for row in left_file:\n if not row: continue # skip blank lines\n row = tuple(row)\n results = index.search(lowstrip(row[left_column]), threshold=minscore)\n if results:\n if count > 0:\n results = results[:count]\n for rank, result in enumerate(results, 1):\n out.writerow(row + (rank, result[1]) + result[0])\n elif join == \"outer\":\n out.writerow(row)\n" ]
#!/usr/bin/python """ Left similarity join between two CSV files. For each row in the first file, take the specified join column and find similar rows in the second file based on ngram similarity to a specified column in the second file. For each resulting pair of rows, output a row consisting of the fields from the first file, a column with the similarity value, and then the fields from the second file. """ from __future__ import print_function import csv, os, re from ngram import NGram def lowstrip(term): """Convert to lowercase and strip spaces""" term = re.sub('\s+', ' ', term) term = term.lower() return term def main(left_path, left_column, right_path, right_column, outfile, titles, join, minscore, count, warp): """Perform the similarity join""" right_file = csv.reader(open(right_path, 'r')) if titles: right_header = next(right_file) index = NGram((tuple(r) for r in right_file), threshold=minscore, warp=warp, key=lambda x: lowstrip(x[right_column])) left_file = csv.reader(open(left_path, 'r')) out = csv.writer(open(outfile, 'w'), lineterminator='\n') if titles: left_header = next(left_file) out.writerow(left_header + ["Rank", "Similarity"] + right_header) for row in left_file: if not row: continue # skip blank lines row = tuple(row) results = index.search(lowstrip(row[left_column]), threshold=minscore) if results: if count > 0: results = results[:count] for rank, result in enumerate(results, 1): out.writerow(row + (rank, result[1]) + result[0]) elif join == "outer": out.writerow(row) if __name__ == '__main__': console_main()
gpoulter/python-ngram
ngram.py
NGram.copy
python
def copy(self, items=None): return NGram(items if items is not None else self, self.threshold, self.warp, self._key, self.N, self._pad_len, self._pad_char)
Return a new NGram object with the same settings, and referencing the same items. Copy is shallow in that each item is not recursively copied. Optionally specify alternate items to populate the copy. >>> from ngram import NGram >>> from copy import deepcopy >>> n = NGram(['eggs', 'spam']) >>> m = n.copy() >>> m.add('ham') >>> sorted(list(n)) ['eggs', 'spam'] >>> sorted(list(m)) ['eggs', 'ham', 'spam'] >>> p = n.copy(['foo', 'bar']) >>> sorted(list(p)) ['bar', 'foo']
train
https://github.com/gpoulter/python-ngram/blob/f8543bdc84a4d24ac60a48b36c4034f881664491/ngram.py#L127-L148
null
class NGram(set): """A set that supports searching for members by N-gram string similarity. In Python 2, items should be `unicode` string or a plain ASCII `str` (bytestring) - do not use UTF-8 or other multi-byte encodings, because multi-byte characters will be split up. :type threshold: float in 0.0 ... 1.0 :param threshold: minimum similarity for a string to be considered a match. :type warp: float in 1.0 ... 3.0 :param warp: use warp greater than 1.0 to increase the similarity of \ shorter string pairs. :type items: [item, ...] :param items: iteration of items to index for N-gram search. :type N: int >= 2 :param N: number of characters per n-gram. :type pad_len: int in 0 ... N-1 :param pad_len: how many characters padding to add (defaults to N-1). :type pad_char: str or unicode :param pad_char: character to use for padding. Default is '$', but \ consider using the\ non-breaking space character, ``u'\\xa0'`` \ (``u"\\u00A0"``). :type key: function(item) -> str/unicode :param key: Function to convert items into string, default is no \ conversion. Recommended to use `str` or `unicode` for non-string items. \ Using anonymous function prevents NGram class from being pickled. Instance variables: :ivar _grams: For each n-gram, the items containing it and the number of \ times\ the n-gram occurs in the item as ``{str:{item:int, ...}, ...}``. :ivar length: maps items to length of the padded string representations \ as ``{item:int, ...}``. 
""" def __init__(self, items=None, threshold=0.0, warp=1.0, key=None, N=3, pad_len=None, pad_char='$', **kwargs): super(NGram, self).__init__() if not (0 <= threshold <= 1): raise ValueError("threshold out of range 0.0 to 1.0: " + repr(threshold)) if not (1.0 <= warp <= 3.0): raise ValueError( "warp out of range 1.0 to 3.0: " + repr(warp)) if not N >= 1: raise ValueError("N out of range (should be N >= 1): " + repr(N)) if pad_len is None: pad_len = N - 1 if not (0 <= pad_len < N): raise ValueError("pad_len out of range: " + repr(pad_len)) if not len(pad_char) == 1: raise ValueError( "pad_char is not single character: " + repr(pad_char)) if key is not None and not callable(key): raise ValueError("key is not a function: " + repr(key)) self.threshold = threshold self.warp = warp self.N = N self._pad_len = pad_len self._pad_char = pad_char self._padding = pad_char * pad_len # derive a padding string # compatibility shim for 3.1 iconv parameter if 'iconv' in kwargs: self._key = kwargs.pop('iconv') warnings.warn('"iconv" parameter deprecated, use "key" instead.', DeprecationWarning) # no longer support 3.1 qconv parameter if 'qconv' in kwargs: raise ValueError('qconv query conversion parameter unsupported. ' 'Please process query to a string before calling .search') self._key = key self._grams = {} self.length = {} if items: self.update(items) def __reduce__(self): """Return state information for pickling, no references to this instance. The key function must be None, a builtin function, or a named module-level function. >>> from ngram import NGram >>> n = NGram([0xDEAD, 0xBEEF], key=hex) >>> import pickle >>> p = pickle.dumps(n) >>> m = pickle.loads(p) >>> sorted(list(m)) [48879, 57005] """ return NGram, (list(self), self.threshold, self.warp, self._key, self.N, self._pad_len, self._pad_char) def key(self, item): """Get the key string for the item. 
>>> from ngram import NGram >>> n = NGram(key=lambda x:x[1]) >>> n.key((3,"ham")) 'ham' """ return self._key(item) if self._key else item def pad(self, string): """Pad a string in preparation for splitting into ngrams. >>> from ngram import NGram >>> n = NGram() >>> n.pad('ham') '$$ham$$' """ return self._padding + string + self._padding def _split(self, string): """Iterates over the ngrams of a string (no padding). >>> from ngram import NGram >>> n = NGram() >>> list(n._split("hamegg")) ['ham', 'ame', 'meg', 'egg'] """ for i in range(len(string) - self.N + 1): yield string[i:i + self.N] def split(self, string): """Pads a string and iterates over its ngrams. >>> from ngram import NGram >>> n = NGram() >>> list(n.split("ham")) ['$$h', '$ha', 'ham', 'am$', 'm$$'] """ return self._split(self.pad(string)) def ngrams(self, string): """Alias for 3.1 compatibility, please set pad_len=0 and use split.""" warnings.warn('Method ngram deprecated, use method split with pad_len=0 instead.', DeprecationWarning) return self._split(string) def ngrams_pad(self, string): """Alias for 3.1 compatibility, please use split instead.""" warnings.warn('Method ngrams_pad deprecated, use method split instead.', DeprecationWarning) return self.split(string) def splititem(self, item): """Pads the string key of an item and iterates over its ngrams. >>> from ngram import NGram >>> n = NGram(key=lambda x:x[1]) >>> item = (3,"ham") >>> list(n.splititem(item)) ['$$h', '$ha', 'ham', 'am$', 'm$$'] """ return self.split(self.key(item)) def add(self, item): """Add an item to the N-gram index (if it has not already been added). 
>>> from ngram import NGram >>> n = NGram() >>> n.add("ham") >>> list(n) ['ham'] >>> n.add("spam") >>> sorted(list(n)) ['ham', 'spam'] """ if item not in self: # Add the item to the base set super(NGram, self).add(item) # Record length of padded string padded_item = self.pad(self.key(item)) self.length[item] = len(padded_item) for ngram in self._split(padded_item): # Add a new n-gram and string to index if necessary self._grams.setdefault(ngram, {}).setdefault(item, 0) # Increment number of times the n-gram appears in the string self._grams[ngram][item] += 1 def remove(self, item): """Remove an item from the set. Inverts the add operation. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> n.remove('spam') >>> list(n) ['eggs'] """ if item in self: super(NGram, self).remove(item) del self.length[item] for ngram in set(self.splititem(item)): del self._grams[ngram][item] def pop(self): """Remove and return an arbitrary set element. Raises KeyError if the set is empty. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> x = n.pop() >>> len(n) 1 """ item = super(NGram, self).pop() del self.length[item] for ngram in set(self.splititem(item)): del self._grams[ngram][item] return item def items_sharing_ngrams(self, query): """Retrieve the subset of items that share n-grams the query string. :param query: look up items that share N-grams with this string. :return: mapping from matched string to the number of shared N-grams. >>> from ngram import NGram >>> n = NGram(["ham","spam","eggs"]) >>> sorted(n.items_sharing_ngrams("mam").items()) [('ham', 2), ('spam', 2)] """ # From matched string to number of N-grams shared with query string shared = {} # Dictionary mapping n-gram to string to number of occurrences of that # ngram in the string that remain to be matched. 
remaining = {} for ngram in self.split(query): try: for match, count in self._grams[ngram].items(): remaining.setdefault(ngram, {}).setdefault(match, count) # match as many occurrences as exist in matched string if remaining[ngram][match] > 0: remaining[ngram][match] -= 1 shared.setdefault(match, 0) shared[match] += 1 except KeyError: pass return shared def searchitem(self, item, threshold=None): """Search the index for items whose key exceeds the threshold similarity to the key of the given item. :return: list of pairs of (item, similarity) by decreasing similarity. >>> from ngram import NGram >>> n = NGram([(0, "SPAM"), (1, "SPAN"), (2, "EG"), ... (3, "SPANN")], key=lambda x:x[1]) >>> sorted(n.searchitem((2, "SPA"), 0.35)) [((0, 'SPAM'), 0.375), ((1, 'SPAN'), 0.375)] """ return self.search(self.key(item), threshold) def search(self, query, threshold=None): """Search the index for items whose key exceeds threshold similarity to the query string. :param query: returned items will have at least `threshold` \ similarity to the query string. :return: list of pairs of (item, similarity) by decreasing similarity. 
>>> from ngram import NGram >>> n = NGram([(0, "SPAM"), (1, "SPAN"), (2, "EG")], key=lambda x:x[1]) >>> sorted(n.search("SPA")) [((0, 'SPAM'), 0.375), ((1, 'SPAN'), 0.375)] >>> n.search("M") [((0, 'SPAM'), 0.125)] >>> n.search("EG") [((2, 'EG'), 1.0)] """ threshold = threshold if threshold is not None else self.threshold results = [] # Identify possible results for match, samegrams in self.items_sharing_ngrams(query).items(): allgrams = (len(self.pad(query)) + self.length[match] - (2 * self.N) - samegrams + 2) similarity = self.ngram_similarity(samegrams, allgrams, self.warp) if similarity >= threshold: results.append((match, similarity)) # Sort results by decreasing similarity results.sort(key=lambda x: x[1], reverse=True) return results def finditem(self, item, threshold=None): """Return most similar item to the provided one, or None if nothing exceeds the threshold. >>> from ngram import NGram >>> n = NGram([(0, "Spam"), (1, "Ham"), (2, "Eggsy"), (3, "Egggsy")], ... key=lambda x:x[1].lower()) >>> n.finditem((3, 'Hom')) (1, 'Ham') >>> n.finditem((4, "Oggsy")) (2, 'Eggsy') >>> n.finditem((4, "Oggsy"), 0.8) """ results = self.searchitem(item, threshold) if results: return results[0][0] else: return None def find(self, query, threshold=None): """Simply return the best match to the query, None on no match. >>> from ngram import NGram >>> n = NGram(["Spam","Eggs","Ham"], key=lambda x:x.lower(), N=1) >>> n.find('Hom') 'Ham' >>> n.find("Spom") 'Spam' >>> n.find("Spom", 0.8) """ results = self.search(query, threshold) if results: return results[0][0] else: return None @staticmethod def ngram_similarity(samegrams, allgrams, warp=1.0): """Similarity for two sets of n-grams. :note: ``similarity = (a**e - d**e)/a**e`` where `a` is \ "all n-grams", `d` is "different n-grams" and `e` is the warp. :param samegrams: number of n-grams shared by the two strings. :param allgrams: total of the distinct n-grams across the two strings. :return: similarity in the range 0.0 to 1.0. 
>>> from ngram import NGram >>> NGram.ngram_similarity(5, 10) 0.5 >>> NGram.ngram_similarity(5, 10, warp=2) 0.75 >>> NGram.ngram_similarity(5, 10, warp=3) 0.875 >>> NGram.ngram_similarity(2, 4, warp=2) 0.75 >>> NGram.ngram_similarity(3, 4) 0.75 """ if abs(warp - 1.0) < 1e-9: similarity = float(samegrams) / allgrams else: diffgrams = float(allgrams - samegrams) similarity = ((allgrams ** warp - diffgrams ** warp) / (allgrams ** warp)) return similarity @staticmethod def compare(s1, s2, **kwargs): """Compares two strings and returns their similarity. :param s1: first string :param s2: second string :param kwargs: additional keyword arguments passed to __init__. :return: similarity between 0.0 and 1.0. >>> from ngram import NGram >>> NGram.compare('spa', 'spam') 0.375 >>> NGram.compare('ham', 'bam') 0.25 >>> NGram.compare('spam', 'pam') #N=2 0.375 >>> NGram.compare('ham', 'ams', N=1) 0.5 """ if s1 is None or s2 is None: if s1 == s2: return 1.0 return 0.0 try: return NGram([s1], **kwargs).search(s2)[0][1] except IndexError: return 0.0 ### Set operations implemented on top of NGram add/remove def update(self, items): """Update the set with new items. >>> from ngram import NGram >>> n = NGram(["spam"]) >>> n.update(["eggs"]) >>> sorted(list(n)) ['eggs', 'spam'] """ for item in items: self.add(item) def discard(self, item): """Remove an element from a set if it is a member. If the element is not a member, do nothing. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> n.discard('spam') >>> n.discard('ham') >>> list(n) ['eggs'] """ if item in self: self.remove(item) def clear(self): """Remove all elements from this set. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> sorted(list(n)) ['eggs', 'spam'] >>> n.clear() >>> list(n) [] """ super(NGram, self).clear() self._grams = {} self.length = {} def union(self, *others): """Return the union of two or more sets as a new set. 
>>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> sorted(list(a.union(b))) ['eggs', 'ham', 'spam'] """ return self.copy(super(NGram, self).union(*others)) def difference(self, *others): """Return the difference of two or more sets as a new set. >>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> list(a.difference(b)) ['eggs'] """ return self.copy(super(NGram, self).difference(*others)) def difference_update(self, other): """Remove from this set all elements from `other` set. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> other = set(['spam']) >>> n.difference_update(other) >>> list(n) ['eggs'] """ for item in other: self.discard(item) def intersection(self, *others): """Return the intersection of two or more sets as a new set. >>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> list(a.intersection(b)) ['spam'] """ return self.copy(super(NGram, self).intersection(*others)) def intersection_update(self, *others): """Update the set with the intersection of itself and other sets. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> other = set(['spam', 'ham']) >>> n.intersection_update(other) >>> list(n) ['spam'] """ self.difference_update(super(NGram, self).difference(*others)) def symmetric_difference(self, other): """Return the symmetric difference of two sets as a new set. >>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> sorted(list(a.symmetric_difference(b))) ['eggs', 'ham'] """ return self.copy(super(NGram, self).symmetric_difference(other)) def symmetric_difference_update(self, other): """Update the set with the symmetric difference of itself and `other`. 
>>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> other = set(['spam', 'ham']) >>> n.symmetric_difference_update(other) >>> sorted(list(n)) ['eggs', 'ham'] """ intersection = super(NGram, self).intersection(other) self.update(other) # add items present in other self.difference_update(intersection) # remove items present in both
gpoulter/python-ngram
ngram.py
NGram._split
python
def _split(self, string): for i in range(len(string) - self.N + 1): yield string[i:i + self.N]
Iterates over the ngrams of a string (no padding). >>> from ngram import NGram >>> n = NGram() >>> list(n._split("hamegg")) ['ham', 'ame', 'meg', 'egg']
train
https://github.com/gpoulter/python-ngram/blob/f8543bdc84a4d24ac60a48b36c4034f881664491/ngram.py#L170-L179
null
class NGram(set): """A set that supports searching for members by N-gram string similarity. In Python 2, items should be `unicode` string or a plain ASCII `str` (bytestring) - do not use UTF-8 or other multi-byte encodings, because multi-byte characters will be split up. :type threshold: float in 0.0 ... 1.0 :param threshold: minimum similarity for a string to be considered a match. :type warp: float in 1.0 ... 3.0 :param warp: use warp greater than 1.0 to increase the similarity of \ shorter string pairs. :type items: [item, ...] :param items: iteration of items to index for N-gram search. :type N: int >= 2 :param N: number of characters per n-gram. :type pad_len: int in 0 ... N-1 :param pad_len: how many characters padding to add (defaults to N-1). :type pad_char: str or unicode :param pad_char: character to use for padding. Default is '$', but \ consider using the\ non-breaking space character, ``u'\\xa0'`` \ (``u"\\u00A0"``). :type key: function(item) -> str/unicode :param key: Function to convert items into string, default is no \ conversion. Recommended to use `str` or `unicode` for non-string items. \ Using anonymous function prevents NGram class from being pickled. Instance variables: :ivar _grams: For each n-gram, the items containing it and the number of \ times\ the n-gram occurs in the item as ``{str:{item:int, ...}, ...}``. :ivar length: maps items to length of the padded string representations \ as ``{item:int, ...}``. 
""" def __init__(self, items=None, threshold=0.0, warp=1.0, key=None, N=3, pad_len=None, pad_char='$', **kwargs): super(NGram, self).__init__() if not (0 <= threshold <= 1): raise ValueError("threshold out of range 0.0 to 1.0: " + repr(threshold)) if not (1.0 <= warp <= 3.0): raise ValueError( "warp out of range 1.0 to 3.0: " + repr(warp)) if not N >= 1: raise ValueError("N out of range (should be N >= 1): " + repr(N)) if pad_len is None: pad_len = N - 1 if not (0 <= pad_len < N): raise ValueError("pad_len out of range: " + repr(pad_len)) if not len(pad_char) == 1: raise ValueError( "pad_char is not single character: " + repr(pad_char)) if key is not None and not callable(key): raise ValueError("key is not a function: " + repr(key)) self.threshold = threshold self.warp = warp self.N = N self._pad_len = pad_len self._pad_char = pad_char self._padding = pad_char * pad_len # derive a padding string # compatibility shim for 3.1 iconv parameter if 'iconv' in kwargs: self._key = kwargs.pop('iconv') warnings.warn('"iconv" parameter deprecated, use "key" instead.', DeprecationWarning) # no longer support 3.1 qconv parameter if 'qconv' in kwargs: raise ValueError('qconv query conversion parameter unsupported. ' 'Please process query to a string before calling .search') self._key = key self._grams = {} self.length = {} if items: self.update(items) def __reduce__(self): """Return state information for pickling, no references to this instance. The key function must be None, a builtin function, or a named module-level function. >>> from ngram import NGram >>> n = NGram([0xDEAD, 0xBEEF], key=hex) >>> import pickle >>> p = pickle.dumps(n) >>> m = pickle.loads(p) >>> sorted(list(m)) [48879, 57005] """ return NGram, (list(self), self.threshold, self.warp, self._key, self.N, self._pad_len, self._pad_char) def copy(self, items=None): """Return a new NGram object with the same settings, and referencing the same items. Copy is shallow in that each item is not recursively copied. 
Optionally specify alternate items to populate the copy. >>> from ngram import NGram >>> from copy import deepcopy >>> n = NGram(['eggs', 'spam']) >>> m = n.copy() >>> m.add('ham') >>> sorted(list(n)) ['eggs', 'spam'] >>> sorted(list(m)) ['eggs', 'ham', 'spam'] >>> p = n.copy(['foo', 'bar']) >>> sorted(list(p)) ['bar', 'foo'] """ return NGram(items if items is not None else self, self.threshold, self.warp, self._key, self.N, self._pad_len, self._pad_char) def key(self, item): """Get the key string for the item. >>> from ngram import NGram >>> n = NGram(key=lambda x:x[1]) >>> n.key((3,"ham")) 'ham' """ return self._key(item) if self._key else item def pad(self, string): """Pad a string in preparation for splitting into ngrams. >>> from ngram import NGram >>> n = NGram() >>> n.pad('ham') '$$ham$$' """ return self._padding + string + self._padding def split(self, string): """Pads a string and iterates over its ngrams. >>> from ngram import NGram >>> n = NGram() >>> list(n.split("ham")) ['$$h', '$ha', 'ham', 'am$', 'm$$'] """ return self._split(self.pad(string)) def ngrams(self, string): """Alias for 3.1 compatibility, please set pad_len=0 and use split.""" warnings.warn('Method ngram deprecated, use method split with pad_len=0 instead.', DeprecationWarning) return self._split(string) def ngrams_pad(self, string): """Alias for 3.1 compatibility, please use split instead.""" warnings.warn('Method ngrams_pad deprecated, use method split instead.', DeprecationWarning) return self.split(string) def splititem(self, item): """Pads the string key of an item and iterates over its ngrams. >>> from ngram import NGram >>> n = NGram(key=lambda x:x[1]) >>> item = (3,"ham") >>> list(n.splititem(item)) ['$$h', '$ha', 'ham', 'am$', 'm$$'] """ return self.split(self.key(item)) def add(self, item): """Add an item to the N-gram index (if it has not already been added). 
>>> from ngram import NGram >>> n = NGram() >>> n.add("ham") >>> list(n) ['ham'] >>> n.add("spam") >>> sorted(list(n)) ['ham', 'spam'] """ if item not in self: # Add the item to the base set super(NGram, self).add(item) # Record length of padded string padded_item = self.pad(self.key(item)) self.length[item] = len(padded_item) for ngram in self._split(padded_item): # Add a new n-gram and string to index if necessary self._grams.setdefault(ngram, {}).setdefault(item, 0) # Increment number of times the n-gram appears in the string self._grams[ngram][item] += 1 def remove(self, item): """Remove an item from the set. Inverts the add operation. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> n.remove('spam') >>> list(n) ['eggs'] """ if item in self: super(NGram, self).remove(item) del self.length[item] for ngram in set(self.splititem(item)): del self._grams[ngram][item] def pop(self): """Remove and return an arbitrary set element. Raises KeyError if the set is empty. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> x = n.pop() >>> len(n) 1 """ item = super(NGram, self).pop() del self.length[item] for ngram in set(self.splititem(item)): del self._grams[ngram][item] return item def items_sharing_ngrams(self, query): """Retrieve the subset of items that share n-grams the query string. :param query: look up items that share N-grams with this string. :return: mapping from matched string to the number of shared N-grams. >>> from ngram import NGram >>> n = NGram(["ham","spam","eggs"]) >>> sorted(n.items_sharing_ngrams("mam").items()) [('ham', 2), ('spam', 2)] """ # From matched string to number of N-grams shared with query string shared = {} # Dictionary mapping n-gram to string to number of occurrences of that # ngram in the string that remain to be matched. 
remaining = {} for ngram in self.split(query): try: for match, count in self._grams[ngram].items(): remaining.setdefault(ngram, {}).setdefault(match, count) # match as many occurrences as exist in matched string if remaining[ngram][match] > 0: remaining[ngram][match] -= 1 shared.setdefault(match, 0) shared[match] += 1 except KeyError: pass return shared def searchitem(self, item, threshold=None): """Search the index for items whose key exceeds the threshold similarity to the key of the given item. :return: list of pairs of (item, similarity) by decreasing similarity. >>> from ngram import NGram >>> n = NGram([(0, "SPAM"), (1, "SPAN"), (2, "EG"), ... (3, "SPANN")], key=lambda x:x[1]) >>> sorted(n.searchitem((2, "SPA"), 0.35)) [((0, 'SPAM'), 0.375), ((1, 'SPAN'), 0.375)] """ return self.search(self.key(item), threshold) def search(self, query, threshold=None): """Search the index for items whose key exceeds threshold similarity to the query string. :param query: returned items will have at least `threshold` \ similarity to the query string. :return: list of pairs of (item, similarity) by decreasing similarity. 
>>> from ngram import NGram >>> n = NGram([(0, "SPAM"), (1, "SPAN"), (2, "EG")], key=lambda x:x[1]) >>> sorted(n.search("SPA")) [((0, 'SPAM'), 0.375), ((1, 'SPAN'), 0.375)] >>> n.search("M") [((0, 'SPAM'), 0.125)] >>> n.search("EG") [((2, 'EG'), 1.0)] """ threshold = threshold if threshold is not None else self.threshold results = [] # Identify possible results for match, samegrams in self.items_sharing_ngrams(query).items(): allgrams = (len(self.pad(query)) + self.length[match] - (2 * self.N) - samegrams + 2) similarity = self.ngram_similarity(samegrams, allgrams, self.warp) if similarity >= threshold: results.append((match, similarity)) # Sort results by decreasing similarity results.sort(key=lambda x: x[1], reverse=True) return results def finditem(self, item, threshold=None): """Return most similar item to the provided one, or None if nothing exceeds the threshold. >>> from ngram import NGram >>> n = NGram([(0, "Spam"), (1, "Ham"), (2, "Eggsy"), (3, "Egggsy")], ... key=lambda x:x[1].lower()) >>> n.finditem((3, 'Hom')) (1, 'Ham') >>> n.finditem((4, "Oggsy")) (2, 'Eggsy') >>> n.finditem((4, "Oggsy"), 0.8) """ results = self.searchitem(item, threshold) if results: return results[0][0] else: return None def find(self, query, threshold=None): """Simply return the best match to the query, None on no match. >>> from ngram import NGram >>> n = NGram(["Spam","Eggs","Ham"], key=lambda x:x.lower(), N=1) >>> n.find('Hom') 'Ham' >>> n.find("Spom") 'Spam' >>> n.find("Spom", 0.8) """ results = self.search(query, threshold) if results: return results[0][0] else: return None @staticmethod def ngram_similarity(samegrams, allgrams, warp=1.0): """Similarity for two sets of n-grams. :note: ``similarity = (a**e - d**e)/a**e`` where `a` is \ "all n-grams", `d` is "different n-grams" and `e` is the warp. :param samegrams: number of n-grams shared by the two strings. :param allgrams: total of the distinct n-grams across the two strings. :return: similarity in the range 0.0 to 1.0. 
>>> from ngram import NGram >>> NGram.ngram_similarity(5, 10) 0.5 >>> NGram.ngram_similarity(5, 10, warp=2) 0.75 >>> NGram.ngram_similarity(5, 10, warp=3) 0.875 >>> NGram.ngram_similarity(2, 4, warp=2) 0.75 >>> NGram.ngram_similarity(3, 4) 0.75 """ if abs(warp - 1.0) < 1e-9: similarity = float(samegrams) / allgrams else: diffgrams = float(allgrams - samegrams) similarity = ((allgrams ** warp - diffgrams ** warp) / (allgrams ** warp)) return similarity @staticmethod def compare(s1, s2, **kwargs): """Compares two strings and returns their similarity. :param s1: first string :param s2: second string :param kwargs: additional keyword arguments passed to __init__. :return: similarity between 0.0 and 1.0. >>> from ngram import NGram >>> NGram.compare('spa', 'spam') 0.375 >>> NGram.compare('ham', 'bam') 0.25 >>> NGram.compare('spam', 'pam') #N=2 0.375 >>> NGram.compare('ham', 'ams', N=1) 0.5 """ if s1 is None or s2 is None: if s1 == s2: return 1.0 return 0.0 try: return NGram([s1], **kwargs).search(s2)[0][1] except IndexError: return 0.0 ### Set operations implemented on top of NGram add/remove def update(self, items): """Update the set with new items. >>> from ngram import NGram >>> n = NGram(["spam"]) >>> n.update(["eggs"]) >>> sorted(list(n)) ['eggs', 'spam'] """ for item in items: self.add(item) def discard(self, item): """Remove an element from a set if it is a member. If the element is not a member, do nothing. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> n.discard('spam') >>> n.discard('ham') >>> list(n) ['eggs'] """ if item in self: self.remove(item) def clear(self): """Remove all elements from this set. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> sorted(list(n)) ['eggs', 'spam'] >>> n.clear() >>> list(n) [] """ super(NGram, self).clear() self._grams = {} self.length = {} def union(self, *others): """Return the union of two or more sets as a new set. 
>>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> sorted(list(a.union(b))) ['eggs', 'ham', 'spam'] """ return self.copy(super(NGram, self).union(*others)) def difference(self, *others): """Return the difference of two or more sets as a new set. >>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> list(a.difference(b)) ['eggs'] """ return self.copy(super(NGram, self).difference(*others)) def difference_update(self, other): """Remove from this set all elements from `other` set. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> other = set(['spam']) >>> n.difference_update(other) >>> list(n) ['eggs'] """ for item in other: self.discard(item) def intersection(self, *others): """Return the intersection of two or more sets as a new set. >>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> list(a.intersection(b)) ['spam'] """ return self.copy(super(NGram, self).intersection(*others)) def intersection_update(self, *others): """Update the set with the intersection of itself and other sets. >>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> other = set(['spam', 'ham']) >>> n.intersection_update(other) >>> list(n) ['spam'] """ self.difference_update(super(NGram, self).difference(*others)) def symmetric_difference(self, other): """Return the symmetric difference of two sets as a new set. >>> from ngram import NGram >>> a = NGram(['spam', 'eggs']) >>> b = NGram(['spam', 'ham']) >>> sorted(list(a.symmetric_difference(b))) ['eggs', 'ham'] """ return self.copy(super(NGram, self).symmetric_difference(other)) def symmetric_difference_update(self, other): """Update the set with the symmetric difference of itself and `other`. 
>>> from ngram import NGram >>> n = NGram(['spam', 'eggs']) >>> other = set(['spam', 'ham']) >>> n.symmetric_difference_update(other) >>> sorted(list(n)) ['eggs', 'ham'] """ intersection = super(NGram, self).intersection(other) self.update(other) # add items present in other self.difference_update(intersection) # remove items present in both