| id (int64, 0–190k) | prompt (string, length 21–13.4M) | docstring (string, length 1–12k, nullable) |
|---|---|---|
155,573 | import functools
import inspect
import runpy
import sys
import time
from pathlib import Path
from typing import Any, Optional
def get_default_args(func):
    """Return a mapping of parameter name -> default value for *func*.

    Parameters without an explicit default are omitted.
    """
    defaults = {}
    for name, param in inspect.signature(func).parameters.items():
        if param.default is not inspect.Parameter.empty:
            defaults[name] = param.default
    return defaults
The provided code snippet includes necessary dependencies for implementing the `wrap_fn` function. Write a Python function `def wrap_fn(fn, cls, method_name, trace, stack_level=1, pre_fn=None, post_fn=None, is_class_method=None)` to solve the following problem:
Wrap a function so that its execution can be traced and its args and return values modified.
Here is the function:
def wrap_fn(fn, cls, method_name, trace, stack_level=1, pre_fn=None, post_fn=None, is_class_method=None):
    """Wrap a function so that its execution can be traced and its args and return values modified.

    Args:
        fn: The function (or method) to wrap.
        cls: Class owning the method; its ``__qualname__`` keys the trace.
        method_name: Name under which calls are recorded in the trace.
        trace: Mutable nested mapping that call metadata is recorded into,
            keyed by class name -> instance id -> method name -> frame id.
        stack_level: Index into ``inspect.stack()`` identifying the calling
            frame to record.
        pre_fn: Optional hook run before ``fn``; returns a trace payload plus
            possibly-modified args and kwargs.
        post_fn: Optional hook run after ``fn``; returns a trace payload plus
            a possibly-modified return value.
        is_class_method: When truthy, ``fn`` is invoked without the explicit
            ``self`` argument.

    Returns:
        The wrapped callable with tracing attached.
    """
    class_name = cls.__qualname__

    @functools.wraps(fn)
    def fn_with_tracing(self, *args: Any, **kwargs: Any):
        if class_name not in trace:
            trace[class_name] = {}
        self_id = id(self)
        stack = inspect.stack()
        frame = stack[stack_level]
        frame_id = id(frame)
        stack_len = len(stack) - 1
        if self_id not in trace[class_name]:
            trace[class_name][self_id] = {}
        if method_name not in trace[class_name][self_id]:
            trace[class_name][self_id][method_name] = {}
        if frame_id not in trace[class_name][self_id][method_name]:
            trace[class_name][self_id][method_name][frame_id] = {}
        trace_entry = trace[class_name][self_id][method_name][frame_id]
        if pre_fn:
            # If a pre_fn is specified, it can both record information
            # in a trace, as well as return modified args and kwargs
            # that will be provided to the actual fn being wrapped
            pre_trace, args, kwargs = pre_fn(self, *args, **kwargs)
            trace_entry["pre"] = pre_trace
        # We record the invocation and the calling location in the trace
        trace_entry["frame"] = {
            "filename": frame.filename,
            "lineno": frame.lineno,
            "function": frame.function,
            "depth": stack_len,
        }
        # we cache the default parameters used during the function call
        trace_entry["default_args"] = get_default_args(fn)
        # we cache also the parameters used during the function call
        trace_entry["call_args"] = kwargs
        trace_entry["call"] = {"start": time.time_ns()}
        ret = fn(self, *args, **kwargs) if not is_class_method else fn(*args, **kwargs)
        trace_entry["call"]["end"] = time.time_ns()
        if post_fn:
            # If a post_fn is specified, it can both record information
            # in a trace, as well as modify the value returned from fn
            post_trace, ret = post_fn(self, ret)
            trace_entry["post"] = post_trace
        return ret

    return fn_with_tracing
155,574 | import errno
import inspect
import os
import os.path as osp
import shutil
import sys
import traceback
from dataclasses import asdict
from getpass import getuser
from importlib.util import module_from_spec, spec_from_file_location
from tempfile import gettempdir
from typing import Any, Callable, Dict, List, Optional, Union
import requests
from fastapi import HTTPException
from pydantic import BaseModel
from lightning.app.api.http_methods import Post
from lightning.app.api.request_types import _APIRequest, _CommandRequest, _RequestResponse
from lightning.app.utilities import frontend
from lightning.app.utilities.app_helpers import Logger, is_overridden
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
from lightning.app.utilities.state import AppState
class ClientCommand:
    """A user-defined command executed on the client side against a running Lightning app."""

    # Human-readable description shown to the user; falls back to the method's docstring.
    description: str = ""
    # Extra pip requirements needed to run this command.
    requirements: List[str] = []

    def __init__(self, method: Callable):
        self.method = method
        if not self.description:
            self.description = self.method.__doc__ or ""
        # If the method is bound, record the owning flow's name.
        flow = getattr(self.method, "__self__", None)
        self.owner = flow.name if flow else None
        self.models: Optional[Dict[str, BaseModel]] = None
        self.app_url = None
        self._state = None

    def _setup(self, command_name: str, app_url: str) -> None:
        """Record the command name and the URL of the target app."""
        self.command_name = command_name
        self.app_url = app_url

    def state(self):
        """Fetch (once) and cache the AppState of the running app.

        NOTE(review): this looks intended to be used as a property — confirm against callers.
        """
        if self._state is None:
            assert self.app_url
            # TODO: Resolve this hack
            os.environ["LIGHTNING_APP_STATE_URL"] = "1"
            self._state = AppState(host=self.app_url)
            self._state._request_state()
            os.environ.pop("LIGHTNING_APP_STATE_URL")
        return self._state

    def run(self, **cli_kwargs) -> None:
        """Overrides with the logic to execute on the client side."""

    def invoke_handler(self, config: Optional[BaseModel] = None) -> Dict[str, Any]:
        """POST the optional config to the app's ``/command/<name>`` endpoint.

        Returns the JSON response on HTTP 200; otherwise prints the failure
        detail and exits the process.
        """
        command = self.command_name.replace(" ", "_")
        resp = requests.post(self.app_url + f"/command/{command}", data=config.json() if config else None)
        if resp.status_code != 200:
            try:
                detail = str(resp.json())
            except Exception:
                detail = "Internal Server Error"
            print(f"Failed with status code {resp.status_code}. Detail: {detail}")
            sys.exit(0)
        return resp.json()

    def _to_dict(self):
        """Serialize the metadata shared with the server."""
        return {"owner": self.owner, "requirements": self.requirements}

    def __call__(self, **kwargs: Any):
        """Invoke the wrapped method directly with keyword arguments."""
        return self.method(**kwargs)
def _to_annotation(anno: str) -> str:
anno = anno.split("'")[1]
if "." in anno:
return anno.split(".")[-1]
return anno
The provided code snippet includes necessary dependencies for implementing the `_validate_client_command` function. Write a Python function `def _validate_client_command(command: ClientCommand)` to solve the following problem:
Extract method and its metadata from a ClientCommand.
Here is the function:
def _validate_client_command(command: ClientCommand):
    """Extract method and its metadata from a ClientCommand.

    Validates that every parameter of the command's method is annotated with a
    pydantic ``BaseModel`` that is resolvable from the command class's module.

    Raises:
        Exception: If a parameter is unannotated, if its model is defined next
            to the method instead of next to the command class, or if the
            annotation is not a pydantic ``BaseModel`` subclass.
    """
    params = inspect.signature(command.method).parameters
    command_metadata = {
        "cls_path": inspect.getfile(command.__class__),
        "cls_name": command.__class__.__name__,
        "params": {p.name: _to_annotation(str(p.annotation)) for p in params.values()},
        **command._to_dict(),
    }
    method = command.method
    command.models = {}
    for k, v in command_metadata["params"].items():
        if v == "_empty":
            raise Exception(
                f"Please, annotate your method {method} with pydantic BaseModel. Refer to the documentation."
            )
        config = getattr(sys.modules[command.__module__], v, None)
        if config is None:
            # Only when the model is missing from the command's module do we look at the
            # method's module — finding it there is itself an error, since the model must
            # live alongside the command class.
            config = getattr(sys.modules[method.__module__], v, None)
            if config:
                raise Exception(
                    f"The provided annotation for the argument {k} should be in the file "
                    f"{inspect.getfile(command.__class__)}, not {inspect.getfile(command.method)}."
                )
        if config is None or not issubclass(config, BaseModel):
            raise Exception(
                f"The provided annotation for the argument {k} should be a subclass of pydantic BaseModel."
            )
155,575 | import errno
import inspect
import os
import os.path as osp
import shutil
import sys
import traceback
from dataclasses import asdict
from getpass import getuser
from importlib.util import module_from_spec, spec_from_file_location
from tempfile import gettempdir
from typing import Any, Callable, Dict, List, Optional, Union
import requests
from fastapi import HTTPException
from pydantic import BaseModel
from lightning.app.api.http_methods import Post
from lightning.app.api.request_types import _APIRequest, _CommandRequest, _RequestResponse
from lightning.app.utilities import frontend
from lightning.app.utilities.app_helpers import Logger, is_overridden
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
from lightning.app.utilities.state import AppState
def _process_api_request(app, request: _APIRequest):
    """Execute an API request against the targeted flow method.

    Returns:
        A dict holding the ``_RequestResponse`` and the originating request id.
    """
    flow = app.get_component_by_name(request.name)
    method = getattr(flow, request.method_name)
    try:
        response = _RequestResponse(content=method(*request.args, **request.kwargs), status_code=200)
    except HTTPException as ex:
        logger.error(repr(ex))
        response = _RequestResponse(status_code=ex.status_code, content=ex.detail)
    except Exception:
        # format_exc returns the traceback text; print_exc returns None and
        # would make this log line read "None".
        logger.error(traceback.format_exc())
        response = _RequestResponse(status_code=500)
    return {"response": response, "id": request.id}
def _process_command_requests(app, request: _CommandRequest):
    """Execute the command matching ``request.method_name`` among the app's commands.

    Returns:
        A dict with the ``_RequestResponse`` and request id when a matching
        command was found and evaluated, otherwise ``None``.
    """
    for command in app.commands:
        for command_name, method in command.items():
            command_name = command_name.replace(" ", "_")
            if request.method_name == command_name:
                # 2.1: Evaluate the method associated to a specific command.
                # Validation is done on the CLI side.
                try:
                    response = _RequestResponse(content=method(*request.args, **request.kwargs), status_code=200)
                except HTTPException as ex:
                    logger.error(repr(ex))
                    response = _RequestResponse(status_code=ex.status_code, content=ex.detail)
                except Exception:
                    # format_exc returns the traceback text; print_exc returns
                    # None and would make this log line read "None".
                    logger.error(traceback.format_exc())
                    response = _RequestResponse(status_code=500)
                return {"response": response, "id": request.id}
    return None
class _CommandRequest(_BaseRequest):
    """Payload describing a user command to be executed by the app."""

    # Unique id used to correlate the response with this request.
    id: str
    # Name of the targeted component.
    name: str
    # Name of the command to invoke.
    method_name: str
    args: Any
    kwargs: Any
class _APIRequest(_BaseRequest):
    """Payload describing an API call to be executed against a flow method."""

    # Unique id used to correlate the response with this request.
    id: str
    # Name of the targeted component (flow).
    name: str
    # Name of the flow method to invoke.
    method_name: str
    args: Any
    kwargs: Any
The provided code snippet includes necessary dependencies for implementing the `_process_requests` function. Write a Python function `def _process_requests(app, requests: List[Union[_APIRequest, _CommandRequest]]) -> None` to solve the following problem:
Convert user commands to API endpoint.
Here is the function:
def _process_requests(app, requests: List[Union[_APIRequest, _CommandRequest]]) -> None:
    """Convert user commands to API endpoint."""
    responses = []
    for req in requests:
        # API requests and command requests are dispatched to different handlers.
        handler = _process_api_request if isinstance(req, _APIRequest) else _process_command_requests
        result = handler(app, req)
        if result:
            responses.append(result)
    app.api_response_queue.put(responses)
155,576 | import json
from typing import Any, Dict
def _duplicate_checker(js):
"""_duplicate_checker verifies that your JSON object doesn't contain duplicate keys."""
result = {}
for name, value in js:
if name in result:
raise ValueError(
f"Unable to load JSON. A duplicate key {name} was detected. JSON objects must have unique keys."
)
result[name] = value
return result
The provided code snippet includes necessary dependencies for implementing the `string2dict` function. Write a Python function `def string2dict(text)` to solve the following problem:
String2dict parses a JSON string into a dictionary, ensuring no keys are duplicated by accident.
Here is the function:
def string2dict(text):
    """String2dict parses a JSON string into a dictionary, ensuring no keys are duplicated by accident.

    Args:
        text: A JSON document as ``str`` or UTF-8 encoded ``bytes``.

    Returns:
        The parsed dictionary.

    Raises:
        ValueError: If the text is not valid JSON or contains duplicate keys.
    """
    if not isinstance(text, str):
        text = text.decode("utf-8")
    try:
        return json.loads(text, object_pairs_hook=_duplicate_checker)
    except ValueError as ex:
        # Chain the original error so the root cause remains visible in tracebacks.
        raise ValueError(f"Unable to load JSON: {str(ex)}.") from ex
155,577 | import json
from typing import Any, Dict
def is_openapi(obj):
    """Return ``True`` when *obj* was generated by OpenAPI (exposes ``swagger_types``)."""
    try:
        obj.swagger_types
    except AttributeError:
        return False
    return True
The provided code snippet includes necessary dependencies for implementing the `create_openapi_object` function. Write a Python function `def create_openapi_object(json_obj: Dict, target: Any)` to solve the following problem:
Create the OpenAPI object from the given JSON dict and based on the target object. Lightning AI uses the target object to make new objects from the given JSON spec so the target must be a valid object.
Here is the function:
def create_openapi_object(json_obj: Dict, target: Any):
    """Create the OpenAPI object from the given JSON dict and based on the target object.

    Lightning AI uses the target object to make new objects from the given JSON spec so the target must be a valid
    object.
    """
    if not isinstance(json_obj, dict):
        raise TypeError("json_obj must be a dictionary")
    if not is_openapi(target):
        raise TypeError("target must be an openapi object")

    attribs = {}
    for key, value in json_obj.items():
        try:
            sub_target = getattr(target, key)
        except AttributeError:
            # user provided key is not a valid key on openapi object
            raise ValueError(f"Field {key} not found in the target object")
        # Recurse into nested OpenAPI objects; plain values are taken as-is.
        attribs[key] = create_openapi_object(value, sub_target) if is_openapi(sub_target) else value
    # TODO(sherin) - specifically process list and dict and do the validation. Also do the
    # verification for enum types
    return target.__class__(**attribs)
155,578 | from random import choice, randint
# Pool of adjectives used to generate random, human-readable names, grouped by theme.
_adjectives = [
    # Appearance, sound, smell...
    "acrid",
    "ambrosial",
    "amorphous",
    "armored",
    "aromatic",
    "bald",
    "blazing",
    "boisterous",
    "bouncy",
    "brawny",
    "bulky",
    "camouflaged",
    "caped",
    "chubby",
    "curvy",
    "elastic",
    "ethereal",
    "fat",
    "feathered",
    "fiery",
    "flashy",
    "flat",
    "fluffy",
    "foamy",
    "fragrant",
    "furry",
    "fuzzy",
    "glaring",
    "hairy",
    "heavy",
    "hissing",
    "horned",
    "icy",
    "imaginary",
    "invisible",
    "lean",
    "loud",
    "loutish",
    "lumpy",
    "lush",
    "masked",
    "meaty",
    "messy",
    "misty",
    "nebulous",
    "noisy",
    "nondescript",
    "organic",
    "purring",
    "quiet",
    "quirky",
    "radiant",
    "roaring",
    "ruddy",
    "rustling",
    "screeching",
    "shaggy",
    "shapeless",
    "shiny",
    "silent",
    "silky",
    "singing",
    "skinny",
    "smooth",
    "soft",
    "spicy",
    "spiked",
    "statuesque",
    "sticky",
    "tacky",
    "tall",
    "tangible",
    "tentacled",
    "thick",
    "thundering",
    "venomous",
    "warm",
    "weightless",
    "whispering",
    "winged",
    "wooden",
    # Beauty & Charm
    "adorable",
    "affable",
    "amazing",
    "amiable",
    "attractive",
    "beautiful",
    "calm",
    "charming",
    "cherubic",
    "classic",
    "classy",
    "convivial",
    "cordial",
    "cuddly",
    "curly",
    "cute",
    "debonair",
    "elegant",
    "famous",
    "fresh",
    "friendly",
    "funny",
    "gorgeous",
    "graceful",
    "gregarious",
    "grinning",
    "handsome",
    "hilarious",
    "hot",
    "interesting",
    "kind",
    "laughing",
    "lovely",
    "meek",
    "mellow",
    "merciful",
    "neat",
    "nifty",
    "notorious",
    "poetic",
    "pretty",
    "refined",
    "refreshing",
    "sexy",
    "smiling",
    "sociable",
    "spiffy",
    "stylish",
    "sweet",
    "tactful",
    "whimsical",
    "boring",
    # Character & Emotions
    "abiding",
    "accurate",
    "adamant",
    "adaptable",
    "adventurous",
    "alluring",
    "aloof",
    "ambitious",
    "amusing",
    "annoying",
    "arrogant",
    "aspiring",
    "belligerent",
    "benign",
    "berserk",
    "benevolent",
    "bold",
    "brave",
    "cheerful",
    "chirpy",
    "cocky",
    "congenial",
    "courageous",
    "cryptic",
    "curious",
    "daft",
    "dainty",
    "daring",
    "defiant",
    "delicate",
    "delightful",
    "determined",
    "devout",
    "didactic",
    "diligent",
    "discreet",
    "dramatic",
    "dynamic",
    "eager",
    "eccentric",
    "elated",
    "encouraging",
    "enigmatic",
    "enthusiastic",
    "evasive",
    "faithful",
    "fair",
    "fanatic",
    "fearless",
    "fervent",
    "festive",
    "fierce",
    "fine",
    "free",
    "gabby",
    "garrulous",
    "gay",
    "gentle",
    "glistening",
    "greedy",
    "grumpy",
    "happy",
    "honest",
    "hopeful",
    "hospitable",
    "impetuous",
    "independent",
    "industrious",
    "innocent",
    "intrepid",
    "jolly",
    "jovial",
    "just",
    "lively",
    "loose",
    "loyal",
    "merry",
    "modest",
    "mysterious",
    "nice",
    "obedient",
    "optimistic",
    "orthodox",
    "outgoing",
    "outrageous",
    "overjoyed",
    "passionate",
    "perky",
    "placid",
    "polite",
    "positive",
    "proud",
    "prudent",
    "puzzling",
    "quixotic",
    "quizzical",
    "rebel",
    "resolute",
    "rampant",
    "righteous",
    "romantic",
    "rough",
    "rousing",
    "sassy",
    "satisfied",
    "sly",
    "sincere",
    "snobbish",
    "spirited",
    "spry",
    "stalwart",
    "stirring",
    "swinging",
    "tasteful",
    "thankful",
    "tidy",
    "tremendous",
    "truthful",
    "unselfish",
    "upbeat",
    "uppish",
    "valiant",
    "vehement",
    "vengeful",
    "vigorous",
    "vivacious",
    "zealous",
    "zippy",
    # Intelligence & Abilities
    "able",
    "adept",
    "analytic",
    "astute",
    "attentive",
    "brainy",
    "busy",
    "calculating",
    "capable",
    "careful",
    "cautious",
    "certain",
    "clever",
    "competent",
    "conscious",
    "cooperative",
    "crafty",
    "crazy",
    "cunning",
    "daffy",
    "devious",
    "discerning",
    "efficient",
    "expert",
    "functional",
    "gifted",
    "helpful",
    "enlightened",
    "idealistic",
    "impartial",
    "industrious",
    "ingenious",
    "inquisitive",
    "intelligent",
    "inventive",
    "judicious",
    "keen",
    "knowing",
    "literate",
    "logical",
    "masterful",
    "mindful",
    "nonchalant",
    "observant",
    "omniscient",
    "poised",
    "practical",
    "pragmatic",
    "proficient",
    "provocative",
    "qualified",
    "radical",
    "rational",
    "realistic",
    "resourceful",
    "savvy",
    "sceptical",
    "sensible",
    "serious",
    "shrewd",
    "skilled",
    "slick",
    "slim",
    "sloppy",
    "smart",
    "sophisticated",
    "stoic",
    "succinct",
    "talented",
    "thoughtful",
    "tricky",
    "unbiased",
    "uptight",
    "versatile",
    "versed",
    "visionary",
    "wise",
    "witty",
    # Strength & Agility
    "accelerated",
    "active",
    "agile",
    "athletic",
    "dashing",
    "deft",
    "dexterous",
    "energetic",
    "fast",
    "frisky",
    "hasty",
    "hypersonic",
    "meteoric",
    "mighty",
    "muscular",
    "nimble",
    "nippy",
    "powerful",
    "prompt",
    "quick",
    "rapid",
    "resilient",
    "robust",
    "rugged",
    "solid",
    "speedy",
    "steadfast",
    "steady",
    "strong",
    "sturdy",
    "tireless",
    "tough",
    "unyielding",
    # Money & Power
    "rich",
    "wealthy",
    # Science
    "meticulous",
    "precise",
    "rigorous",
    "scrupulous",
    "strict",
    # Movement type
    "airborne",
    "burrowing",
    "crouching",
    "flying",
    "hidden",
    "hopping",
    "jumping",
    "lurking",
    "tunneling",
    "warping",
    # Location and Dwelling
    "aboriginal",
    "amphibian",
    "aquatic",
    "arboreal",
    "polar",
    "terrestrial",
    "urban",
    # Awesome
    "accomplished",
    "astonishing",
    "authentic",
    "awesome",
    "delectable",
    "excellent",
    "exotic",
    "exuberant",
    "fabulous",
    "fantastic",
    "fascinating",
    "flawless",
    "fortunate",
    "funky",
    "godlike",
    "glorious",
    "groovy",
    "honored",
    "illustrious",
    "imposing",
    "important",
    "impressive",
    "incredible",
    "invaluable",
    "kickass",
    "majestic",
    "magnificent",
    "marvellous",
    "monumental",
    "perfect",
    "phenomenal",
    "pompous",
    "precious",
    "premium",
    "private",
    "remarkable",
    "spectacular",
    "splendid",
    "successful",
    "wonderful",
    "wondrous",
    # Original
    "offbeat",
    "original",
    "outstanding",
    "quaint",
    "unique",
    # Time
    "ancient",
    "antique",
    "prehistoric",
    "primitive",
    # Misc
    "abstract",
    "acoustic",
    "angelic",
    "arcane",
    "archetypal",
    "augmented",
    "auspicious",
    "axiomatic",
    "beneficial",
    "bipedal",
    "bizarre",
    "complex",
    "dancing",
    "dangerous",
    "demonic",
    "divergent",
    "economic",
    "electric",
    "elite",
    "eminent",
    "enchanted",
    "esoteric",
    "finicky",
    "fractal",
    "futuristic",
    "gainful",
    "hallowed",
    "heavenly",
    "heretic",
    "holistic",
    "hungry",
    "hypnotic",
    "hysterical",
    "illegal",
    "imperial",
    "imported",
    "impossible",
    "inescapable",
    "juicy",
    "liberal",
    "ludicrous",
    "lyrical",
    "magnetic",
    "manipulative",
    "mature",
    "military",
    "macho",
    "married",
    "melodic",
    "natural",
    "naughty",
    "nocturnal",
    "nostalgic",
    "optimal",
    "pastoral",
    "peculiar",
    "piquant",
    "pristine",
    "prophetic",
    "psychedelic",
    "quantum",
    "rare",
    "real",
    "secret",
    "simple",
    "spectral",
    "spiritual",
    "stereotyped",
    "stimulating",
    "straight",
    "strange",
    "tested",
    "therapeutic",
    "true",
    "ubiquitous",
    "uncovered",
    "unnatural",
    "utopian",
    "vagabond",
    "vague",
    "vegan",
    "victorious",
    "vigilant",
    "voracious",
    "wakeful",
    "wandering",
    "watchful",
    "wild",
    # Pseudo-colors
    "bright",
    "brilliant",
    "colorful",
    "crystal",
    "dark",
    "dazzling",
    "fluorescent",
    "glittering",
    "glossy",
    "gleaming",
    "light",
    "mottled",
    "neon",
    "opalescent",
    "pastel",
    "smoky",
    "sparkling",
    "spotted",
    "striped",
    "translucent",
    "transparent",
    "vivid",
]
_surnames = [
# Muhammad ibn Jabir Al-Battani was a founding father of astronomy. https://en.wikipedia.org/wiki/Al-Battani
"albattani",
# Frances E. Allen, became the first female IBM Fellow in 1989. In 2006, she became the first female
# recipient of the ACM's Turing Award. https://en.wikipedia.org/wiki/Frances_E._Allen
"allen",
# June Almeida - Scottish virologist who took the first pictures of the rubella
# virus - https://en.wikipedia.org/wiki/June_Almeida
"almeida",
# Kathleen Antonelli, American computer programmer and one of the six original
# programmers of the ENIAC - https://en.wikipedia.org/wiki/Kathleen_Antonelli
"antonelli",
# Maria Gaetana Agnesi - Italian mathematician, philosopher, theologian and humanitarian.
# She was the first woman to write a mathematics handbook and the first woman appointed
# as a Mathematics Professor at a University. https://en.wikipedia.org/wiki/Maria_Gaetana_Agnesi
"agnesi",
# Archimedes was a physicist, engineer and mathematician who invented too many
# things to list them here. https://en.wikipedia.org/wiki/Archimedes
"archimedes",
# Maria Ardinghelli - Italian translator, mathematician and physicist -
# https://en.wikipedia.org/wiki/Maria_Ardinghelli
"ardinghelli",
# Aryabhata - Ancient Indian mathematician-astronomer during 476-550 CE https://en.wikipedia.org/wiki/Aryabhata
"aryabhata",
# Wanda Austin - Wanda Austin is the President and CEO of The Aerospace Corporation,
# a leading architect for the US security space programs. https://en.wikipedia.org/wiki/Wanda_Austin
"austin",
# Charles Babbage invented the concept of a programmable computer. https://en.wikipedia.org/wiki/Charles_Babbage.
"babbage",
# Stefan Banach - Polish mathematician, was one of the founders of modern
# functional analysis. https://en.wikipedia.org/wiki/Stefan_Banach
"banach",
# Buckaroo Banzai and his mentor Dr. Hikita perfected the "oscillation overthruster",
# a device that allows one to pass through solid matter. -
# https://en.wikipedia.org/wiki/The_Adventures_of_Buckaroo_Banzai_Across_the_8th_Dimension
"banzai",
# John Bardeen co-invented the transistor - https://en.wikipedia.org/wiki/John_Bardeen
"bardeen",
# Jean Bartik, born Betty Jean Jennings, was one of the original programmers
# for the ENIAC computer. https://en.wikipedia.org/wiki/Jean_Bartik
"bartik",
# Laura Bassi, the world's first female professor https://en.wikipedia.org/wiki/Laura_Bassi
"bassi",
# Hugh Beaver, British engineer, founder of the Guinness Book of World
# Records https://en.wikipedia.org/wiki/Hugh_Beaver
"beaver",
# Alexander Graham Bell - an eminent Scottish-born scientist, inventor, engineer and innovator
# who is credited with inventing the first practical telephone - https://en.wikipedia.org/wiki/Alexander_Graham_Bell
"bell",
# Karl Friedrich Benz - a German automobile engineer. Inventor of the first
# practical motorcar. https://en.wikipedia.org/wiki/Karl_Benz
"benz",
# Homi J Bhabha - was an Indian nuclear physicist, founding director, and professor of
# physics at the Tata Institute of Fundamental Research. Colloquially known as "father of
# Indian nuclear programme"- https://en.wikipedia.org/wiki/Homi_J._Bhabha
"bhabha",
# Bhaskara II - Ancient Indian mathematician-astronomer whose work on calculus predates
# Newton and Leibniz by over half a millennium - https://en.wikipedia.org/wiki/Bh%C4%81skara_II#Calculus
"bhaskara",
# Sue Black - British computer scientist and campaigner. She has been instrumental in
# saving Bletchley Park, the site of World War II codebreaking -
# https://en.wikipedia.org/wiki/Sue_Black_(computer_scientist)
"black",
# Elizabeth Helen Blackburn - Australian-American Nobel laureate; best known
# for co-discovering telomerase. https://en.wikipedia.org/wiki/Elizabeth_Blackburn
"blackburn",
# Elizabeth Blackwell - American doctor and first American woman to receive a
# medical degree - https://en.wikipedia.org/wiki/Elizabeth_Blackwell
"blackwell",
# Niels Bohr is the father of quantum theory. https://en.wikipedia.org/wiki/Niels_Bohr.
"bohr",
# Kathleen Booth, she's credited with writing the first assembly language.
# https://en.wikipedia.org/wiki/Kathleen_Booth
"booth",
# Anita Borg - Anita Borg was the founding director of the Institute for
# Women and Technology (IWT). https://en.wikipedia.org/wiki/Anita_Borg
"borg",
# Satyendra Nath Bose - He provided the foundation for Bose\u2013Einstein statistics
# and the theory of the Bose\u2013Einstein condensate. - https://en.wikipedia.org/wiki/Satyendra_Nath_Bose
"bose",
# Katherine Louise Bouman is an imaging scientist and Assistant Professor of Computer
# Science at the California Institute of Technology. She researches computational methods for
# imaging, and developed an algorithm that made possible the picture first visualization of a
# black hole using the Event Horizon Telescope. - https://en.wikipedia.org/wiki/Katie_Bouman
"bouman",
# Evelyn Boyd Granville - She was one of the first African-American woman to receive a Ph.D.
# in mathematics; she earned it in 1949 from Yale University. https://en.wikipedia.org/wiki/Evelyn_Boyd_Granville
"boyd",
# Brahmagupta - Ancient Indian mathematician during 598-670 CE who gave rules
# to compute with zero - https://en.wikipedia.org/wiki/Brahmagupta#Zero
"brahmagupta",
# Walter Houser Brattain co-invented the transistor - https://en.wikipedia.org/wiki/Walter_Houser_Brattain
"brattain",
# Emmett Brown invented time travel. https://en.wikipedia.org/wiki/Emmett_Brown (thanks Brian Goff)
"brown",
# Linda Brown Buck - American biologist and Nobel laureate best known for her genetic and
# molecular analyses of the mechanisms of smell. https://en.wikipedia.org/wiki/Linda_B._Buck
"buck",
# Dame Susan Jocelyn Bell Burnell - Northern Irish astrophysicist who discovered radio pulsars
# and was the first to analyse them. https://en.wikipedia.org/wiki/Jocelyn_Bell_Burnell
"burnell",
# Annie Jump Cannon - pioneering female astronomer who classified hundreds of thousands of stars
# and created the system we use to understand stars today. https://en.wikipedia.org/wiki/Annie_Jump_Cannon
"cannon",
# Rachel Carson - American marine biologist and conservationist, her book Silent Spring and other
# writings are credited with advancing the global environmental movement.
# https://en.wikipedia.org/wiki/Rachel_Carson
"carson",
# Dame Mary Lucy Cartwright - British mathematician who was one of the first to study what is
# now known as chaos theory. Also known for Cartwright's theorem which finds applications in
# signal processing. https://en.wikipedia.org/wiki/Mary_Cartwright
"cartwright",
# George Washington Carver - American agricultural scientist and inventor. He was the most
# prominent black scientist of the early 20th century. https://en.wikipedia.org/wiki/George_Washington_Carver
"carver",
# Vinton Gray Cerf - American Internet pioneer, recognised as one of "the fathers of the Internet".
# With Robert Elliot Kahn, he designed TCP and IP, the primary data communication protocols of
# the Internet and other computer networks. https://en.wikipedia.org/wiki/Vint_Cerf
"cerf",
# Subrahmanyan Chandrasekhar - Astrophysicist known for his mathematical theory on different
# stages and evolution in structures of the stars. He has won nobel prize for physics -
# https://en.wikipedia.org/wiki/Subrahmanyan_Chandrasekhar
"chandrasekhar",
# Sergey Alexeyevich Chaplygin (April 5, 1869 - October 8, 1942) was a Russian and Soviet physicist,
# mathematician, and mechanical engineer. He is known for mathematical formulas such as Chaplygin's
# equation and for a hypothetical substance in cosmology called Chaplygin gas,
# named after him. https://en.wikipedia.org/wiki/Sergey_Chaplygin
"chaplygin",
# Emilie du Chatelet - French natural philosopher, mathematician, physicist, and author
# during the early 1730s, known for her translation of and commentary on Isaac Newton's book
# Principia containing basic laws of physics. https://en.wikipedia.org/wiki/%C3%89milie_du_Ch%C3%A2telet
"chatelet",
# Asima Chatterjee was an Indian organic chemist noted for her research on vinca alkaloids,
# development of drugs for treatment of epilepsy and malaria - https://en.wikipedia.org/wiki/Asima_Chatterjee
"chatterjee",
# Pafnuty Chebyshev - Russian mathematician. He is known fo his works on probability,
# statistics, mechanics, analytical geometry and number theory https://en.wikipedia.org/wiki/Pafnuty_Chebyshev
"chebyshev",
# Bram Cohen - American computer programmer and author of the BitTorrent
# peer-to-peer protocol. https://en.wikipedia.org/wiki/Bram_Cohen
"cohen",
# David Lee Chaum - American computer scientist and cryptographer. Known for his
# seminal contributions in the field of anonymous communication. https://en.wikipedia.org/wiki/David_Chaum
"chaum",
# Joan Clarke - Bletchley Park code breaker during the Second World War who pioneered techniques
# that remained top secret for decades. Also an accomplished numismatist https://en.wikipedia.org/wiki/Joan_Clarke
"clarke",
# Jane Colden - American botanist widely considered the first female
# American botanist - https://en.wikipedia.org/wiki/Jane_Colden
"colden",
# Gerty Theresa Cori - American biochemist who became the third woman and first American woman to win a
# Nobel Prize in science, and the first woman to be awarded the Nobel Prize in Physiology
# or Medicine. Cori was born in Prague. https://en.wikipedia.org/wiki/Gerty_Cori
"cori",
# Seymour Roger Cray was an American electrical engineer and supercomputer architect who designed a series
# of computers that were the fastest in the world for decades. https://en.wikipedia.org/wiki/Seymour_Cray
"cray",
# This entry reflects a husband and wife team who worked together:
# Joan Curran was a Welsh scientist who developed radar and invented chaff, a radar countermeasure.
# https://en.wikipedia.org/wiki/Joan_Curran Samuel Curran was an Irish physicist who worked
# alongside his wife during WWII and invented the proximity fuse. https://en.wikipedia.org/wiki/Samuel_Curran
"curran",
# Marie Curie discovered radioactivity. https://en.wikipedia.org/wiki/Marie_Curie.
"curie",
# Charles Darwin established the principles of natural evolution. https://en.wikipedia.org/wiki/Charles_Darwin.
"darwin",
# Leonardo Da Vinci invented too many things to list here. https://en.wikipedia.org/wiki/Leonardo_da_Vinci.
"davinci",
# A. K. (Alexander Keewatin) Dewdney, Canadian mathematician, computer scientist, author and filmmaker.
# Contributor to Scientific American's "Computer Recreations" from 1984 to 1991. Author of Core War (program),
# The Planiverse, The Armchair Universe, The Magic Machine, The New Turing Omnibus, and more.
# https://en.wikipedia.org/wiki/Alexander_Dewdney
"dewdney",
# Satish Dhawan - Indian mathematician and aerospace engineer, known for leading the successful and
# indigenous development of the Indian space programme. https://en.wikipedia.org/wiki/Satish_Dhawan
"dhawan",
# Bailey Whitfield Diffie - American cryptographer and one of the pioneers of
# public-key cryptography. https://en.wikipedia.org/wiki/Whitfield_Diffie
"diffie",
# Edsger Wybe Dijkstra was a Dutch computer scientist and mathematical scientist.
# https://en.wikipedia.org/wiki/Edsger_W._Dijkstra.
"dijkstra",
# Paul Adrien Maurice Dirac - English theoretical physicist who made fundamental contributions to the
# early development of both quantum mechanics and quantum electrodynamics. https://en.wikipedia.org/wiki/Paul_Dirac
"dirac",
# Agnes Meyer Driscoll - American cryptanalyst during World Wars I and II who successfully cryptanalysed a
# number of Japanese ciphers. She was also the co-developer of one of the cipher machines of
# the US Navy, the CM. https://en.wikipedia.org/wiki/Agnes_Meyer_Driscoll
"driscoll",
# Donna Dubinsky - played an integral role in the development of personal digital assistants (PDAs)
# serving as CEO of Palm, Inc. and co-founding Handspring. https://en.wikipedia.org/wiki/Donna_Dubinsky
"dubinsky",
# Annie Easley - She was a leading member of the team which developed software for the Centaur
# rocket stage and one of the first African-Americans in her field. https://en.wikipedia.org/wiki/Annie_Easley
"easley",
# Thomas Alva Edison, prolific inventor https://en.wikipedia.org/wiki/Thomas_Edison
"edison",
# Albert Einstein invented the general theory of relativity. https://en.wikipedia.org/wiki/Albert_Einstein
"einstein",
# Alexandra Asanovna Elbakyan is a Kazakhstani graduate student, computer programmer, internet pirate in
# hiding, and the creator of the site Sci-Hub. Nature has listed her in 2016 in the top ten people that
# mattered in science, and Ars Technica has compared her to Aaron Swartz. -
# https://en.wikipedia.org/wiki/Alexandra_Elbakyan
"elbakyan",
# Taher A. ElGamal - Egyptian cryptographer best known for the ElGamal discrete log cryptosystem and the
# ElGamal digital signature scheme. https://en.wikipedia.org/wiki/Taher_Elgamal
"elgamal",
# Gertrude Elion - American biochemist, pharmacologist and the 1988 recipient of the
# Nobel Prize in Medicine - https://en.wikipedia.org/wiki/Gertrude_Elion
"elion",
# James Henry Ellis - British engineer and cryptographer employed by the GCHQ. Best known for
# conceiving for the first time, the idea of public-key cryptography. https://en.wikipedia.org/wiki/James_H._Ellis
"ellis",
# Douglas Engelbart gave the mother of all demos: https://en.wikipedia.org/wiki/Douglas_Engelbart
"engelbart",
# Euclid invented geometry. https://en.wikipedia.org/wiki/Euclid
"euclid",
# Leonhard Euler invented large parts of modern mathematics. https://de.wikipedia.org/wiki/Leonhard_Euler
"euler",
# Michael Faraday - British scientist who contributed to the study of electromagnetism and
# electrochemistry. https://en.wikipedia.org/wiki/Michael_Faraday
"faraday",
# Horst Feistel - German-born American cryptographer who was one of the earliest non-government
# researchers to study the design and theory of block ciphers. Co-developer of DES and Lucifer.
# Feistel networks, a symmetric structure used in the construction of block ciphers are named after him.
# https://en.wikipedia.org/wiki/Horst_Feistel
"feistel",
# Pierre de Fermat pioneered several aspects of modern mathematics. https://en.wikipedia.org/wiki/Pierre_de_Fermat
"fermat",
# Enrico Fermi invented the first nuclear reactor. https://en.wikipedia.org/wiki/Enrico_Fermi.
"fermi",
# Richard Feynman was a key contributor to quantum mechanics and particle physics.
# https://en.wikipedia.org/wiki/Richard_Feynman
"feynman",
# Benjamin Franklin is famous for his experiments in electricity and the invention of the lightning rod.
"franklin",
# Yuri Alekseyevich Gagarin - Soviet pilot and cosmonaut, best known as the first human to
# journey into outer space. https://en.wikipedia.org/wiki/Yuri_Gagarin
"gagarin",
# Galileo was a founding father of modern astronomy, and faced politics and obscurantism to
# establish scientific truth. https://en.wikipedia.org/wiki/Galileo_Galilei
"galileo",
# Evariste Galois - French mathematician whose work laid the foundations of Galois theory and group theory,
# two major branches of abstract algebra, and the subfield of Galois connections, all while still in
# his late teens. https://en.wikipedia.org/wiki/%C3%89variste_Galois
"galois",
# Kadambini Ganguly - Indian physician, known for being the first South Asian female physician,
# trained in western medicine, to graduate in South Asia. https://en.wikipedia.org/wiki/Kadambini_Ganguly
"ganguly",
# William Henry "Bill" Gates III is an American business magnate, philanthropist, investor,
# computer programmer, and inventor. https://en.wikipedia.org/wiki/Bill_Gates
"gates",
# Johann Carl Friedrich Gauss - German mathematician who made significant contributions to many fields,
# including number theory, algebra, statistics, analysis, differential geometry, geodesy, geophysics, mechanics,
# electrostatics, magnetic fields, astronomy, matrix theory, and optics.
# https://en.wikipedia.org/wiki/Carl_Friedrich_Gauss
"gauss",
# Marie-Sophie Germain - French mathematician, physicist and philosopher. Known for her work on
# elasticity theory, number theory and philosophy. https://en.wikipedia.org/wiki/Sophie_Germain
"germain",
# Adele Goldberg, was one of the designers and developers of the Smalltalk language.
# https://en.wikipedia.org/wiki/Adele_Goldberg_(computer_scientist)
"goldberg",
# Adele Goldstine, born Adele Katz, wrote the complete technical description for the first electronic
# digital computer, ENIAC. https://en.wikipedia.org/wiki/Adele_Goldstine
"goldstine",
# Shafi Goldwasser is a computer scientist known for creating theoretical foundations of modern
# cryptography. Winner of 2012 ACM Turing Award. https://en.wikipedia.org/wiki/Shafi_Goldwasser
"goldwasser",
# James Golick, all around gangster.
"golick",
# Jane Goodall - British primatologist, ethologist, and anthropologist who is considered to be the
# world's foremost expert on chimpanzees - https://en.wikipedia.org/wiki/Jane_Goodall
"goodall",
# Stephen Jay Gould was an American paleontologist, evolutionary biologist, and historian of science.
# He is most famous for the theory of punctuated equilibrium - https://en.wikipedia.org/wiki/Stephen_Jay_Gould
"gould",
# Carolyn Widney Greider - American molecular biologist and joint winner of the 2009 Nobel Prize for
# Physiology or Medicine for the discovery of telomerase. https://en.wikipedia.org/wiki/Carol_W._Greider
"greider",
# Alexander Grothendieck - German-born French mathematician who became a leading figure in the creation
# of modern algebraic geometry. https://en.wikipedia.org/wiki/Alexander_Grothendieck
"grothendieck",
# Lois Haibt - American computer scientist, part of the team at IBM that developed FORTRAN -
# https://en.wikipedia.org/wiki/Lois_Haibt
"haibt",
# Margaret Hamilton - Director of the Software Engineering Division of the MIT Instrumentation Laboratory,
# which developed on-board flight software for the Apollo space program.
# https://en.wikipedia.org/wiki/Margaret_Hamilton_(scientist)
"hamilton",
# Caroline Harriet Haslett - English electrical engineer, electricity industry administrator and champion of
# women's rights. Co-author of British Standard 1363 that specifies AC power plugs and sockets used across
# the United Kingdom (which is widely considered as one of the safest designs).
# https://en.wikipedia.org/wiki/Caroline_Haslett
"haslett",
# Stephen Hawking pioneered the field of cosmology by combining general relativity and quantum mechanics.
# https://en.wikipedia.org/wiki/Stephen_Hawking
"hawking",
# Martin Edward Hellman - American cryptologist, best known for his invention of public-key cryptography
# in co-operation with Whitfield Diffie and Ralph Merkle. https://en.wikipedia.org/wiki/Martin_Hellman
"hellman",
# Werner Heisenberg was a founding father of quantum mechanics. https://en.wikipedia.org/wiki/Werner_Heisenberg
"heisenberg",
# Grete Hermann was a German philosopher noted for her philosophical work on the foundations of quantum mechanics.
# https://en.wikipedia.org/wiki/Grete_Hermann
"hermann",
# Caroline Lucretia Herschel - German astronomer and discoverer of several comets.
# https://en.wikipedia.org/wiki/Caroline_Herschel
"herschel",
# Heinrich Rudolf Hertz - German physicist who first conclusively proved the existence of the electromagnetic waves.
# https://en.wikipedia.org/wiki/Heinrich_Hertz
"hertz",
# Jaroslav Heyrovsky was the inventor of the polarographic method, father of the electroanalytical method, and
# recipient of the Nobel Prize in 1959. His main field of work was polarography.
# https://en.wikipedia.org/wiki/Jaroslav_Heyrovsk%C3%BD
"heyrovsky",
# Dorothy Hodgkin was a British biochemist, credited with the development of protein crystallography. She was
# awarded the Nobel Prize in Chemistry in 1964. https://en.wikipedia.org/wiki/Dorothy_Hodgkin
"hodgkin",
# Douglas R. Hofstadter is an American professor of cognitive science and author of the Pulitzer Prize and American
# Book Award-winning work Goedel, Escher, Bach: An Eternal Golden Braid in 1979. A mind-bending work which coined
# Hofstadter's Law: "It always takes longer than you expect, even when you take into account Hofstadter's Law."
# https://en.wikipedia.org/wiki/Douglas_Hofstadter
"hofstadter",
# Erna Schneider Hoover revolutionized modern communication by inventing a computerized telephone switching method.
# https://en.wikipedia.org/wiki/Erna_Schneider_Hoover
"hoover",
# Grace Hopper developed the first compiler for a computer programming language and is credited with popularizing
# the term "debugging" for fixing computer glitches. https://en.wikipedia.org/wiki/Grace_Hopper
"hopper",
# Frances Hugle, she was an American scientist, engineer, and inventor who contributed to the understanding of
# semiconductors, integrated circuitry, and the unique electrical principles of microscopic materials.
# https://en.wikipedia.org/wiki/Frances_Hugle
"hugle",
# Hypatia - Greek Alexandrine Neoplatonist philosopher in Egypt who was one of the earliest mothers of mathematics -
# https://en.wikipedia.org/wiki/Hypatia
"hypatia",
# Teruko Ishizaka - Japanese scientist and immunologist who co-discovered the antibody class Immunoglobulin E.
# https://en.wikipedia.org/wiki/Teruko_Ishizaka
"ishizaka",
# Mary Jackson, American mathematician and aerospace engineer who earned the highest title within NASA's
# engineering department - https://en.wikipedia.org/wiki/Mary_Jackson_(engineer)
"jackson",
# Yeong-Sil Jang was a Korean scientist and astronomer during the Joseon Dynasty; he invented the first metal
# printing press and water gauge. https://en.wikipedia.org/wiki/Jang_Yeong-sil
"jang",
# Mae Carol Jemison - is an American engineer, physician, and former NASA astronaut. She became the first black
# woman to travel in space when she served as a mission specialist aboard the Space Shuttle Endeavour -
# https://en.wikipedia.org/wiki/Mae_Jemison
"jemison",
# Betty Jennings - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC -
# https://en.wikipedia.org/wiki/Jean_Bartik
"jennings",
# Mary Lou Jepsen, was the founder and chief technology officer of One Laptop Per Child (OLPC), and the founder of
# Pixel Qi. https://en.wikipedia.org/wiki/Mary_Lou_Jepsen
"jepsen",
# Katherine Coleman Goble Johnson - American physicist and mathematician contributed to the NASA.
# https://en.wikipedia.org/wiki/Katherine_Johnson
"johnson",
# Irene Joliot-Curie - French scientist who was awarded the Nobel Prize for Chemistry in 1935. Daughter of Marie
# and Pierre Curie. https://en.wikipedia.org/wiki/Ir%C3%A8ne_Joliot-Curie
"joliot",
# Karen Sparck Jones came up with the concept of inverse document frequency, which is used in most search engines
# today. https://en.wikipedia.org/wiki/Karen_Sp%C3%A4rck_Jones
"jones",
# A. P. J. Abdul Kalam - is an Indian scientist aka Missile Man of India for his work on the development of
# ballistic missile and launch vehicle technology - https://en.wikipedia.org/wiki/A._P._J._Abdul_Kalam
"kalam",
# Sergey Petrovich Kapitsa (14 February 1928 - 14 August 2012) was a Russian physicist and demographer. He was best
# known as host of the popular and long-running Russian scientific TV show, Evident, but Incredible. His father was
# the Nobel laureate Soviet-era physicist Pyotr Kapitsa, and his brother was the geographer and Antarctic explorer
# Andrey Kapitsa. - https://en.wikipedia.org/wiki/Sergey_Kapitsa
"kapitsa",
# Susan Kare, created the icons and many of the interface elements for the original Apple Macintosh in the 1980s,
# and was an original employee of NeXT, working as the Creative Director. https://en.wikipedia.org/wiki/Susan_Kare
"kare",
# Mstislav Keldysh - a Soviet scientist in the field of mathematics and mechanics, academician of the USSR Academy
# of Sciences (1946), President of the USSR Academy of Sciences (1961-1975),
# three times Hero of Socialist Labor (1956, 1961, 1971), fellow of the Royal Society of Edinburgh (1968).
# https://en.wikipedia.org/wiki/Mstislav_Keldysh
"keldysh",
# Mary Kenneth Keller, Sister Mary Kenneth Keller became the first American woman to earn a
# PhD in Computer Science in 1965. https://en.wikipedia.org/wiki/Mary_Kenneth_Keller
"keller",
# Johannes Kepler, German astronomer known for his three laws of planetary motion -
# https://en.wikipedia.org/wiki/Johannes_Kepler
"kepler",
# Omar Khayyam - Persian mathematician, astronomer and poet. Known for his work on the classification and solution
# of cubic equations, for his contribution to the understanding of Euclid's fifth postulate and for computing the
# length of a year very accurately. https://en.wikipedia.org/wiki/Omar_Khayyam
"khayyam",
# Har Gobind Khorana - Indian-American biochemist who shared the 1968 Nobel Prize for Physiology -
# https://en.wikipedia.org/wiki/Har_Gobind_Khorana
"khorana",
# Jack Kilby invented silicon integrated circuits and gave Silicon Valley its name. -
# https://en.wikipedia.org/wiki/Jack_Kilby
"kilby",
# Maria Kirch - German astronomer and first woman to discover a comet -
# https://en.wikipedia.org/wiki/Maria_Margarethe_Kirch
"kirch",
# Donald Knuth - American computer scientist, author of "The Art of Computer Programming" and creator of the TeX
# typesetting system. https://en.wikipedia.org/wiki/Donald_Knuth
"knuth",
# Sophie Kowalevski - Russian mathematician responsible for important original contributions to analysis,
# differential equations and mechanics - https://en.wikipedia.org/wiki/Sofia_Kovalevskaya
"kowalevski",
# Marie-Jeanne de Lalande - French astronomer, mathematician and cataloguer of stars -
# https://en.wikipedia.org/wiki/Marie-Jeanne_de_Lalande
"lalande",
# Hedy Lamarr - Actress and inventor. The principles of her work are now incorporated into modern Wi-Fi, CDMA
# and Bluetooth technology. https://en.wikipedia.org/wiki/Hedy_Lamarr
"lamarr",
# Leslie B. Lamport - American computer scientist. Lamport is best known for his seminal work in distributed
# systems and was the winner of the 2013 Turing Award. https://en.wikipedia.org/wiki/Leslie_Lamport
"lamport",
# Mary Leakey - British paleoanthropologist who discovered the first fossilized Proconsul skull -
# https://en.wikipedia.org/wiki/Mary_Leakey
"leakey",
# Henrietta Swan Leavitt - she was an American astronomer who discovered the relation between the luminosity and
# the period of Cepheid variable stars. https://en.wikipedia.org/wiki/Henrietta_Swan_Leavitt
"leavitt",
# Esther Miriam Zimmer Lederberg - American microbiologist and a pioneer of bacterial genetics.
# https://en.wikipedia.org/wiki/Esther_Lederberg
"lederberg",
# Inge Lehmann - Danish seismologist and geophysicist. Known for discovering in 1936 that the Earth has a solid
# inner core inside a molten outer core. https://en.wikipedia.org/wiki/Inge_Lehmann
"lehmann",
# Daniel Lewin - Mathematician, Akamai co-founder, soldier, 9/11 victim-- Developed optimization techniques for
# routing traffic on the internet. Died attempting to stop the 9-11 hijackers.
# https://en.wikipedia.org/wiki/Daniel_Lewin
"lewin",
# Ruth Lichterman - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC -
# https://en.wikipedia.org/wiki/Ruth_Teitelbaum
"lichterman",
# Barbara Liskov - co-developed the Liskov substitution principle. Liskov was also the winner of the Turing
# Prize in 2008. - https://en.wikipedia.org/wiki/Barbara_Liskov
"liskov",
# Ada Lovelace invented the first algorithm. https://en.wikipedia.org/wiki/Ada_Lovelace (thanks James Turnbull)
"lovelace",
# Auguste and Louis Lumiere - the first filmmakers in history -
# https://en.wikipedia.org/wiki/Auguste_and_Louis_Lumi%C3%A8re
"lumiere",
# Mahavira - Ancient Indian mathematician during 9th century AD who discovered basic algebraic identities -
# https://en.wikipedia.org/wiki/Mah%C4%81v%C4%ABra_(mathematician)
"mahavira",
# Lynn Margulis (b. Lynn Petra Alexander) - an American evolutionary theorist and biologist, science author,
# educator, and popularizer, and was the primary modern proponent for the significance of symbiosis in evolution. -
# https://en.wikipedia.org/wiki/Lynn_Margulis
"margulis",
# Yukihiro Matsumoto - Japanese computer scientist and software programmer best known as the chief designer of
# the Ruby programming language. https://en.wikipedia.org/wiki/Yukihiro_Matsumoto
"matsumoto",
# James Clerk Maxwell - Scottish physicist, best known for his formulation of electromagnetic theory.
# https://en.wikipedia.org/wiki/James_Clerk_Maxwell
"maxwell",
# Maria Mayer - American theoretical physicist and Nobel laureate in Physics for proposing the nuclear shell model
# of the atomic nucleus - https://en.wikipedia.org/wiki/Maria_Mayer
"mayer",
# John McCarthy invented LISP: https://en.wikipedia.org/wiki/John_McCarthy_(computer_scientist)
"mccarthy",
# Barbara McClintock - a distinguished American cytogeneticist, 1983 Nobel Laureate in Physiology or Medicine for
# discovering transposons. https://en.wikipedia.org/wiki/Barbara_McClintock
"mcclintock",
# Anne Laura Dorinthea McLaren - British developmental biologist whose work helped lead to human
# in-vitro fertilisation. https://en.wikipedia.org/wiki/Anne_McLaren
"mclaren",
# Malcolm McLean invented the modern shipping container: https://en.wikipedia.org/wiki/Malcom_McLean
"mclean",
# Kay McNulty - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC -
# https://en.wikipedia.org/wiki/Kathleen_Antonelli
"mcnulty",
# Gregor Johann Mendel - Czech scientist and founder of genetics. https://en.wikipedia.org/wiki/Gregor_Mendel
"mendel",
# Dmitri Mendeleev - a chemist and inventor. He formulated the Periodic Law, created a farsighted version of the
# periodic table of elements, and used it to correct the properties of some already discovered elements and also
# to predict the properties of eight elements yet to be discovered. https://en.wikipedia.org/wiki/Dmitri_Mendeleev
"mendeleev",
# Lise Meitner - Austrian/Swedish physicist who was involved in the discovery of nuclear fission. The element
# meitnerium is named after her - https://en.wikipedia.org/wiki/Lise_Meitner
"meitner",
# Carla Meninsky, was the game designer and programmer for Atari 2600 games Dodge 'Em and Warlords.
# https://en.wikipedia.org/wiki/Carla_Meninsky
"meninsky",
# Ralph C. Merkle - American computer scientist, known for devising Merkle's puzzles - one of the very first
# schemes for public-key cryptography. Also, inventor of Merkle trees and co-inventor of the Merkle-Damgard
# construction for building collision-resistant cryptographic hash functions and the Merkle-Hellman knapsack
# cryptosystem. https://en.wikipedia.org/wiki/Ralph_Merkle
"merkle",
# Johanna Mestorf - German prehistoric archaeologist and first female museum director in Germany -
# https://en.wikipedia.org/wiki/Johanna_Mestorf
"mestorf",
# Maryam Mirzakhani - an Iranian mathematician and the first woman to win the Fields Medal.
# https://en.wikipedia.org/wiki/Maryam_Mirzakhani
"mirzakhani",
# Rita Levi-Montalcini - Won Nobel Prize in Physiology or Medicine jointly with colleague Stanley Cohen for the
# discovery of nerve growth factor (https://en.wikipedia.org/wiki/Rita_Levi-Montalcini)
"montalcini",
# Gordon Earle Moore - American engineer, Silicon Valley founding father, author of Moore's law.
# https://en.wikipedia.org/wiki/Gordon_Moore
"moore",
# Samuel Morse - contributed to the invention of a single-wire telegraph system based on European telegraphs
# and was a co-developer of the Morse code - https://en.wikipedia.org/wiki/Samuel_Morse
"morse",
# Ian Murdock - founder of the Debian project - https://en.wikipedia.org/wiki/Ian_Murdock
"murdock",
# May-Britt Moser - Nobel prize winner neuroscientist who contributed to the discovery of grid cells in the brain.
# https://en.wikipedia.org/wiki/May-Britt_Moser
"moser",
# John Napier of Merchiston - Scottish landowner known as an astronomer, mathematician and physicist.
# Best known for his discovery of logarithms. https://en.wikipedia.org/wiki/John_Napier
"napier",
# John Forbes Nash, Jr. - American mathematician who made fundamental contributions to game theory, differential
# geometry, and the study of partial differential equations. https://en.wikipedia.org/wiki/John_Forbes_Nash_Jr.
"nash",
# John von Neumann - todays computer architectures are based on the von Neumann architecture.
# https://en.wikipedia.org/wiki/Von_Neumann_architecture
"neumann",
# Isaac Newton invented classic mechanics and modern optics. https://en.wikipedia.org/wiki/Isaac_Newton
"newton",
# Florence Nightingale, more prominently known as a nurse, was also the first female member of the Royal Statistical
# Society and a pioneer in statistical graphics
# https://en.wikipedia.org/wiki/Florence_Nightingale#Statistics_and_sanitary_reform
"nightingale",
# Alfred Nobel - a Swedish chemist, engineer, innovator, and armaments manufacturer (inventor of dynamite) -
# https://en.wikipedia.org/wiki/Alfred_Nobel
"nobel",
# Emmy Noether, German mathematician. Noether's Theorem is named after her.
# https://en.wikipedia.org/wiki/Emmy_Noether
"noether",
# Poppy Northcutt. Poppy Northcutt was the first woman to work as part of NASA's Mission Control.
# http://www.businessinsider.com/poppy-northcutt-helped-apollo-astronauts-2014-12?op=1
"northcutt",
# Robert Noyce invented silicon integrated circuits and gave Silicon Valley its name. -
# https://en.wikipedia.org/wiki/Robert_Noyce
"noyce",
# Panini - Ancient Indian linguist and grammarian from 4th century CE who worked on the world's first formal system
# - https://en.wikipedia.org/wiki/P%C4%81%E1%B9%87ini#Comparison_with_modern_formal_systems
"panini",
# Ambroise Pare invented modern surgery. https://en.wikipedia.org/wiki/Ambroise_Par%C3%A9
"pare",
# Blaise Pascal, French mathematician, physicist, and inventor - https://en.wikipedia.org/wiki/Blaise_Pascal
"pascal",
# Louis Pasteur discovered vaccination, fermentation and pasteurization.
# https://en.wikipedia.org/wiki/Louis_Pasteur.
"pasteur",
# Cecilia Payne-Gaposchkin was an astronomer and astrophysicist who, in 1925, proposed in her Ph.D. thesis an
# explanation for the composition of stars in terms of the relative abundances of hydrogen and helium.
# https://en.wikipedia.org/wiki/Cecilia_Payne-Gaposchkin
"payne",
# Radia Perlman is a software designer and network engineer and most famous for her invention of the
# spanning-tree protocol (STP). https://en.wikipedia.org/wiki/Radia_Perlman
"perlman",
# Rob Pike was a key contributor to Unix, Plan 9, the X graphic system, utf-8, and the Go programming language.
# https://en.wikipedia.org/wiki/Rob_Pike
"pike",
# Henri Poincare made fundamental contributions in several fields of mathematics.
# https://en.wikipedia.org/wiki/Henri_Poincar%C3%A9
"poincare",
# Laura Poitras is a director and producer whose work, made possible by open source crypto tools, advances the
# causes of truth and freedom of information by reporting disclosures by whistleblowers such as Edward Snowden.
# https://en.wikipedia.org/wiki/Laura_Poitras
"poitras",
# Tat'yana Avenirovna Proskuriakova (January 23 [O.S. January 10] 1909 - August 30, 1985) was a Russian-American
# Mayanist scholar and archaeologist who contributed significantly to the deciphering of Maya hieroglyphs, the
# writing system of the pre-Columbian Maya civilization of Mesoamerica.
# https://en.wikipedia.org/wiki/Tatiana_Proskouriakoff
"proskuriakova",
# Claudius Ptolemy - a Greco-Egyptian writer of Alexandria, known as a mathematician, astronomer, geographer,
# astrologer, and poet of a single epigram in the Greek Anthology - https://en.wikipedia.org/wiki/Ptolemy
"ptolemy",
# C. V. Raman - Indian physicist who won the Nobel Prize in 1930 for proposing the Raman effect. -
# https://en.wikipedia.org/wiki/C._V._Raman
"raman",
# Srinivasa Ramanujan - Indian mathematician and autodidact who made extraordinary contributions to mathematical
# analysis, number theory, infinite series, and continued fractions. -
# https://en.wikipedia.org/wiki/Srinivasa_Ramanujan
"ramanujan",
# Sally Kristen Ride was an American physicist and astronaut. She was the first American woman in space, and the
# youngest American astronaut. https://en.wikipedia.org/wiki/Sally_Ride
"ride",
# Dennis Ritchie - co-creator of UNIX and the C programming language. - https://en.wikipedia.org/wiki/Dennis_Ritchie
"ritchie",
# Ida Rhodes - American pioneer in computer programming, designed the first computer used for Social Security.
# https://en.wikipedia.org/wiki/Ida_Rhodes
"rhodes",
# Julia Hall Bowman Robinson - American mathematician renowned for her contributions to the fields of computability
# theory and computational complexity theory. https://en.wikipedia.org/wiki/Julia_Robinson
"robinson",
# Wilhelm Conrad Rontgen - German physicist who was awarded the first Nobel Prize in Physics in 1901 for the
# discovery of X-rays (Rontgen rays). https://en.wikipedia.org/wiki/Wilhelm_R%C3%B6ntgen
"roentgen",
# Rosalind Franklin - British biophysicist and X-ray crystallographer whose research was critical to the
# understanding of DNA - https://en.wikipedia.org/wiki/Rosalind_Franklin
"rosalind",
# Vera Rubin - American astronomer who pioneered work on galaxy rotation rates.
# https://en.wikipedia.org/wiki/Vera_Rubin
"rubin",
# Meghnad Saha - Indian astrophysicist best known for his development of the Saha equation, used to describe
# chemical and physical conditions in stars - https://en.wikipedia.org/wiki/Meghnad_Saha
"saha",
# Jean E. Sammet developed FORMAC, the first widely used computer language for symbolic manipulation of
# mathematical formulas. https://en.wikipedia.org/wiki/Jean_E._Sammet
"sammet",
# Mildred Sanderson - American mathematician best known for Sanderson's theorem concerning modular invariants.
# https://en.wikipedia.org/wiki/Mildred_Sanderson
"sanderson",
# Satoshi Nakamoto is the name used by the unknown person or group of people who developed bitcoin, authored the
# bitcoin white paper, and created and deployed bitcoin's original reference implementation.
# https://en.wikipedia.org/wiki/Satoshi_Nakamoto
"satoshi",
# Adi Shamir - Israeli cryptographer whose numerous inventions and contributions to cryptography include the Feige
# Fiat Shamir identification scheme, the Rivest Shamir Adleman (RSA) public-key cryptosystem, the Shamir's secret
# sharing scheme, the breaking of the Merkle-Hellman cryptosystem, the TWINKLE and TWIRL factoring devices and the
# discovery of differential cryptanalysis (with Eli Biham). https://en.wikipedia.org/wiki/Adi_Shamir
"shamir",
# Claude Shannon - The father of information theory and founder of digital circuit design theory.
# (https://en.wikipedia.org/wiki/Claude_Shannon)
"shannon",
# Carol Shaw - Originally an Atari employee, Carol Shaw is said to be the first female video game designer.
# https://en.wikipedia.org/wiki/Carol_Shaw_(video_game_designer)
"shaw",
# Dame Stephanie "Steve" Shirley - Founded a software company in 1962 employing women working from home.
# https://en.wikipedia.org/wiki/Steve_Shirley
"shirley",
# William Shockley co-invented the transistor - https://en.wikipedia.org/wiki/William_Shockley
"shockley",
# Lina Solomonovna Stern (or Shtern; 26 August 1878 - 7 March 1968) was a Soviet biochemist, physiologist and
# humanist whose medical discoveries saved thousands of lives at the fronts of World War II. She is best known
# for her pioneering work on blood\u2013brain barrier, which she described as hemato-encephalic barrier in 1921.
# https://en.wikipedia.org/wiki/Lina_Stern
"shtern",
# Francoise Barre-Sinoussi - French virologist and Nobel Prize Laureate in Physiology or Medicine; her work was
# fundamental in identifying HIV as the cause of AIDS.
# https://en.wikipedia.org/wiki/Fran%C3%A7oise_Barr%C3%A9-Sinoussi
"sinoussi",
# Betty Snyder - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC -
# https://en.wikipedia.org/wiki/Betty_Holberton
"snyder",
# Cynthia Solomon - Pioneer in the fields of artificial intelligence, computer science and educational computing.
# Known for creation of Logo, an educational programming language. https://en.wikipedia.org/wiki/Cynthia_Solomon
"solomon",
# Frances Spence - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC -
# https://en.wikipedia.org/wiki/Frances_Spence
"spence",
# Michael Stonebraker is a database research pioneer and architect of Ingres, Postgres, VoltDB and SciDB.
# Winner of 2014 ACM Turing Award. https://en.wikipedia.org/wiki/Michael_Stonebraker
"stonebraker",
# Ivan Edward Sutherland - American computer scientist and Internet pioneer, widely regarded as the father of
# computer graphics. https://en.wikipedia.org/wiki/Ivan_Sutherland
"sutherland",
# Janese Swanson (with others) developed the first of the Carmen Sandiego games. She went on to found Girl Tech.
# https://en.wikipedia.org/wiki/Janese_Swanson
"swanson",
# Aaron Swartz was influential in creating RSS, Markdown, Creative Commons, Reddit, and much of the internet as we
# know it today. He was devoted to freedom of information on the web. https://en.wikiquote.org/wiki/Aaron_Swartz
"swartz",
# Bertha Swirles was a theoretical physicist who made a number of contributions to early quantum theory.
# https://en.wikipedia.org/wiki/Bertha_Swirles
"swirles",
# Helen Brooke Taussig - American cardiologist and founder of the field of paediatric cardiology.
# https://en.wikipedia.org/wiki/Helen_B._Taussig
"taussig",
# Valentina Tereshkova is a Russian engineer, cosmonaut and politician. She was the first woman to fly to space in
# 1963. In 2013, at the age of 76, she offered to go on a one-way mission to Mars.
# https://en.wikipedia.org/wiki/Valentina_Tereshkova
"tereshkova",
# Nikola Tesla invented the AC electric system and every gadget ever used by a James Bond villain.
# https://en.wikipedia.org/wiki/Nikola_Tesla
"tesla",
# Marie Tharp - American geologist and oceanic cartographer who co-created the first scientific map of the Atlantic
# Ocean floor. Her work led to the acceptance of the theories of plate tectonics and continental drift.
# https://en.wikipedia.org/wiki/Marie_Tharp
"tharp",
# Ken Thompson - co-creator of UNIX and the C programming language - https://en.wikipedia.org/wiki/Ken_Thompson
"thompson",
# Linus Torvalds invented Linux and Git. https://en.wikipedia.org/wiki/Linus_Torvalds
"torvalds",
# Youyou Tu - Chinese pharmaceutical chemist and educator known for discovering artemisinin and dihydroartemisinin,
# used to treat malaria, which has saved millions of lives. Joint winner of the 2015 Nobel Prize in Physiology or
# Medicine. https://en.wikipedia.org/wiki/Tu_Youyou
"tu",
# Alan Turing was a founding father of computer science. https://en.wikipedia.org/wiki/Alan_Turing.
"turing",
# Varahamihira - Ancient Indian mathematician who discovered trigonometric formulae during 505-587 CE -
# https://en.wikipedia.org/wiki/Var%C4%81hamihira#Contributions
"varahamihira",
# Dorothy Vaughan was a NASA mathematician and computer programmer on the SCOUT launch vehicle program that put
# America's first satellites into space - https://en.wikipedia.org/wiki/Dorothy_Vaughan
"vaughan",
# Sir Mokshagundam Visvesvaraya - is a notable Indian engineer. He is a recipient of the Indian Republic's highest
# honour, the Bharat Ratna, in 1955. On his birthday, 15 September is celebrated as Engineer's Day in India in his
# memory - https://en.wikipedia.org/wiki/Visvesvaraya
"visvesvaraya",
# Christiane Nusslein-Volhard - German biologist, won Nobel Prize in Physiology or Medicine in 1995 for research on
# the genetic control of embryonic development. https://en.wikipedia.org/wiki/Christiane_N%C3%BCsslein-Volhard
"volhard",
# Cedric Villani - French mathematician, won Fields Medal, Fermat Prize and Poincare Price for his work in
# differential geometry and statistical mechanics. https://en.wikipedia.org/wiki/C%C3%A9dric_Villani
"villani",
# Marlyn Wescoff - one of the original programmers of the ENIAC. https://en.wikipedia.org/wiki/ENIAC -
# https://en.wikipedia.org/wiki/Marlyn_Meltzer
"wescoff",
# Sylvia B. Wilbur - British computer scientist who helped develop the ARPANET, was one of the first to exchange
# email in the UK and a leading researcher in computer-supported collaborative work.
# https://en.wikipedia.org/wiki/Sylvia_Wilbur
"wilbur",
# Andrew Wiles - Notable British mathematician who proved the enigmatic Fermat's Last Theorem -
# https://en.wikipedia.org/wiki/Andrew_Wiles
"wiles",
# Roberta Williams, did pioneering work in graphical adventure games for personal computers, particularly the King's
# Quest series. https://en.wikipedia.org/wiki/Roberta_Williams
"williams",
# Malcolm John Williamson - British mathematician and cryptographer employed by the GCHQ. Developed in 1974 what
# is now known as Diffie-Hellman key exchange (Diffie and Hellman first published the scheme in 1976).
# https://en.wikipedia.org/wiki/Malcolm_J._Williamson
"williamson",
# Sophie Wilson designed the first Acorn Micro-Computer and the instruction set for ARM processors.
# https://en.wikipedia.org/wiki/Sophie_Wilson
"wilson",
# Jeannette Wing - co-developed the Liskov substitution principle. - https://en.wikipedia.org/wiki/Jeannette_Wing
"wing",
# Steve Wozniak invented the Apple I and Apple II. https://en.wikipedia.org/wiki/Steve_Wozniak
"wozniak",
# The Wright brothers, Orville and Wilbur - credited with inventing and building the world's first successful
# airplane and making the first controlled, powered and sustained heavier-than-air human flight -
# https://en.wikipedia.org/wiki/Wright_brothers
"wright",
# Chien-Shiung Wu - Chinese-American experimental physicist who made significant contributions to nuclear physics.
# https://en.wikipedia.org/wiki/Chien-Shiung_Wu
"wu",
# Rosalyn Sussman Yalow - Rosalyn Sussman Yalow was an American medical physicist, and a co-winner of the 1977
# Nobel Prize in Physiology or Medicine for development of the radioimmunoassay technique.
# https://en.wikipedia.org/wiki/Rosalyn_Sussman_Yalow
"yalow",
# Ada Yonath - an Israeli crystallographer, the first woman from the Middle East to win a Nobel prize in the
# sciences. https://en.wikipedia.org/wiki/Ada_Yonath
"yonath",
# Nikolay Yegorovich Zhukovsky (January 17 1847 - March 17, 1921) was a Russian scientist, mathematician and
# engineer, and a founding father of modern aero- and hydrodynamics. Whereas contemporary scientists scoffed at the
# idea of human flight, Zhukovsky was the first to undertake the study of airflow. He is often called the Father
# of Russian Aviation. https://en.wikipedia.org/wiki/Nikolay_Yegorovich_Zhukovsky
"zhukovsky",
]
The provided code snippet includes necessary dependencies for implementing the `get_unique_name` function. Write a Python function `def get_unique_name()` to solve the following problem:
Generates a random name in the style of "docker containers". This is generated from the list of adjectives and surnames in this package, formatted as "adjective-surname" with a random integer between 0 and 9999 added to the end. A python port of docker's random container name generator. Original source: https://raw.githubusercontent.com/moby/moby/master/pkg/namesgenerator/names-generator.go Examples: >>> import random ; random.seed(42) >>> get_unique_name() 'meek-ardinghelli-4506' >>> get_unique_name() 'truthful-dijkstra-2286'
Here is the function:
def get_unique_name():
    """Generate a random name in the style of "docker containers".

    The name is built from this package's adjective and surname lists, formatted as
    ``"adjective-surname-N"`` where ``N`` is a random integer between 0 and 9999.

    A python port of docker's random container name generator.
    Original source:
    https://raw.githubusercontent.com/moby/moby/master/pkg/namesgenerator/names-generator.go

    Examples:
        >>> import random ; random.seed(42)
        >>> get_unique_name()
        'meek-ardinghelli-4506'
        >>> get_unique_name()
        'truthful-dijkstra-2286'
    """
    # Draw in the same order as before (adjective, surname, integer) so seeded runs reproduce.
    picked_adjective = choice(_adjectives)  # noqa: S311
    picked_surname = choice(_surnames)  # noqa: S311
    suffix = randint(0, 9999)  # noqa: S311
    return f"{picked_adjective}-{picked_surname}-{suffix}"
155,579 | import hashlib
from pathlib import Path
The provided code snippet includes necessary dependencies for implementing the `get_hash` function. Write a Python function `def get_hash(path: Path, chunk_num_blocks: int = 128) -> str` to solve the following problem:
Get the hash of a file.
Here is the function:
def get_hash(path: Path, chunk_num_blocks: int = 128) -> str:
    """Return the blake2b (20-byte digest) hex digest of the file at ``path``.

    The file is streamed in chunks of ``chunk_num_blocks * block_size`` bytes so large
    files are hashed without loading them fully into memory.

    Raises:
        FileNotFoundError: If ``path`` does not exist.
    """
    hasher = hashlib.blake2b(digest_size=20)
    if not path.exists():
        raise FileNotFoundError(f"{path} does not exist")
    chunk_size = chunk_num_blocks * hasher.block_size
    with path.open("rb") as stream:
        while True:
            chunk = stream.read(chunk_size)
            if not chunk:
                break
            hasher.update(chunk)
    return hasher.hexdigest()
155,580 | import subprocess
from pathlib import Path
from typing import List, Union
def execute_git_command(args: List[str], cwd=None) -> str:
    """Run ``git`` with the given arguments and return its combined output.

    Returns
    -------
    output: str
        String combining stdout and stderr (both stripped).
    """
    # check=False: failures are reported through the combined output, never as an exception.
    completed = subprocess.run(["git", *args], capture_output=True, text=True, cwd=cwd, check=False)
    return completed.stdout.strip() + completed.stderr.strip()
def check_github_repository(cwd=None) -> bool:
    """Return ``True`` when the active directory's ``origin`` remote points at GitHub."""
    origin_url = execute_git_command(["config", "--get", "remote.origin.url"], cwd=cwd)
    # Empty output means no origin remote is configured at all.
    return bool(origin_url) and "github.com" in origin_url
The provided code snippet includes necessary dependencies for implementing the `get_git_relative_path` function. Write a Python function `def get_git_relative_path(file: Union[str, Path]) -> str` to solve the following problem:
Finds the relative path of the file to the git root.
Here is the function:
def get_git_relative_path(file: Union[str, Path]) -> str:
    """Return the path of ``file`` relative to the git repository root.

    Raises:
        ValueError: If the active directory is not a GitHub repository.
    """
    if not check_github_repository():
        raise ValueError("Not a GitHub repository.")
    repository_root = execute_git_command(["rev-parse", "--show-toplevel"])
    return str(Path(file).absolute().relative_to(repository_root))
155,581 | import subprocess
from pathlib import Path
from typing import List, Union
def execute_git_command(args: List[str], cwd=None) -> str:
    """Executes a git command. This is expected to return a single string back.

    Returns
    -------
    output: str
        String combining stdout and stderr.
    """
    # check=False: a failing git command reports through the combined output, not an exception.
    process = subprocess.run(["git"] + args, capture_output=True, text=True, cwd=cwd, check=False)
    return process.stdout.strip() + process.stderr.strip()
The provided code snippet includes necessary dependencies for implementing the `check_if_remote_head_is_different` function. Write a Python function `def check_if_remote_head_is_different() -> Union[bool, None]` to solve the following problem:
Checks if remote git repository is different than the version available locally. This only compares the local SHA to the HEAD commit of a given branch. This check won't be used if user isn't in a HEAD locally.
Here is the function:
def check_if_remote_head_is_different() -> Union[bool, None]:
    """Check whether the remote git repository differs from the local checkout.

    This only compares the local SHA to the HEAD commit of the tracked branch.
    Returns ``None`` (check skipped) when any SHA cannot be resolved, e.g. when
    the user isn't on a HEAD locally or no upstream is configured.
    """
    local_sha = execute_git_command(["rev-parse", "@"])
    remote_sha = execute_git_command(["rev-parse", r"@{u}"])
    base_sha = execute_git_command(["merge-base", "@", r"@{u}"])
    # A "fatal" marker in any output means that SHA is unavailable — skip the check.
    if any("fatal" in sha for sha in (local_sha, remote_sha, base_sha)):
        return None
    return local_sha != remote_sha and local_sha != base_sha
155,582 | import subprocess
from pathlib import Path
from typing import List, Union
def execute_git_command(args: List[str], cwd=None) -> str:
    """Executes a git command. This is expected to return a single string back.

    Returns
    -------
    output: str
        String combining stdout and stderr.
    """
    # check=False so callers see errors in the combined output rather than an exception.
    process = subprocess.run(["git"] + args, capture_output=True, text=True, cwd=cwd, check=False)
    return process.stdout.strip() + process.stderr.strip()
The provided code snippet includes necessary dependencies for implementing the `has_uncommitted_files` function. Write a Python function `def has_uncommitted_files() -> bool` to solve the following problem:
Checks if the user has uncommitted files in the local repository. If there are uncommitted files, then show a prompt indicating that uncommitted files exist locally.
Here is the function:
def has_uncommitted_files() -> bool:
    """Return ``True`` when the local repository has uncommitted files.

    Callers use this to prompt the user that uncommitted files exist locally.
    """
    # `update-index --refresh` prints the paths that are out of date; empty output = clean tree.
    output = execute_git_command(["update-index", "--refresh"])
    return output != ""
155,583 | import socket
from functools import wraps
from typing import Any, Callable, Dict, Optional
from urllib.parse import urljoin
import requests
from lightning_cloud.rest_client import GridRestClient, LightningClient, create_swagger_client
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, ConnectTimeout, ReadTimeout
from urllib3.util.retry import Retry
from lightning.app.core import constants
from lightning.app.utilities.app_helpers import Logger
_CONNECTION_RETRY_TOTAL = 2880
_CONNECTION_RETRY_BACKOFF_FACTOR = 0.5
The provided code snippet includes necessary dependencies for implementing the `_configure_session` function. Write a Python function `def _configure_session() -> Session` to solve the following problem:
Configures the session for GET and POST requests. It enables a generous retrial strategy that waits for the application server to connect.
Here is the function:
def _configure_session() -> Session:
    """Configure a ``requests`` session for GET and POST requests.

    It enables a generous retrial strategy that waits for the application server to connect.
    """
    # Wait time between retries increases exponentially according to:
    # backoff_factor * (2 ** (retry - 1))
    retries = Retry(
        total=_CONNECTION_RETRY_TOTAL,
        backoff_factor=_CONNECTION_RETRY_BACKOFF_FACTOR,
        status_forcelist=[429, 500, 502, 503, 504],
    )
    retry_adapter = HTTPAdapter(max_retries=retries)
    session = requests.Session()
    for scheme in ("https://", "http://"):
        session.mount(scheme, retry_adapter)
    return session
155,584 | import socket
from functools import wraps
from typing import Any, Callable, Dict, Optional
from urllib.parse import urljoin
import requests
from lightning_cloud.rest_client import GridRestClient, LightningClient, create_swagger_client
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, ConnectTimeout, ReadTimeout
from urllib3.util.retry import Retry
from lightning.app.core import constants
from lightning.app.utilities.app_helpers import Logger
logger = Logger(__name__)
def _check_service_url_is_ready(url: str, timeout: float = 5, metadata="") -> bool:
    """Return ``True`` if a GET on ``url`` answers within ``timeout`` seconds.

    A 404 also counts as ready: the server is up even if the route is unknown.
    ``metadata`` only enriches the debug log message on failure.
    """
    try:
        response = requests.get(url, timeout=timeout)
        return response.status_code in (200, 404)
    except (ConnectionError, ConnectTimeout, ReadTimeout):
        logger.debug(f"The url {url} is not ready. {metadata}")
        return False
155,585 | import socket
from functools import wraps
from typing import Any, Callable, Dict, Optional
from urllib.parse import urljoin
import requests
from lightning_cloud.rest_client import GridRestClient, LightningClient, create_swagger_client
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, ConnectTimeout, ReadTimeout
from urllib3.util.retry import Retry
from lightning.app.core import constants
from lightning.app.utilities.app_helpers import Logger
The provided code snippet includes necessary dependencies for implementing the `_http_method_logger_wrapper` function. Write a Python function `def _http_method_logger_wrapper(func: Callable) -> Callable` to solve the following problem:
Returns the function decorated by a wrapper that logs the message using the `log_function` hook.
Here is the function:
def _http_method_logger_wrapper(func: Callable) -> Callable:
"""Returns the function decorated by a wrapper that logs the message using the `log_function` hook."""
@wraps(func)
def wrapped(self: "HTTPClient", *args: Any, **kwargs: Any) -> Any:
message = f"HTTPClient: Method: {func.__name__.upper()}, Path: {args[0]}\n"
message += f" Base URL: {self.base_url}\n"
params = kwargs.get("query_params", {})
if params:
message += f" Params: {params}\n"
resp: requests.Response = func(self, *args, **kwargs)
message += f" Response: {resp.status_code} {resp.reason}"
self.log_function(message)
return resp
return wrapped | Returns the function decorated by a wrapper that logs the message using the `log_function` hook. |
155,586 | import socket
from functools import wraps
from typing import Any, Callable, Dict, Optional
from urllib.parse import urljoin
import requests
from lightning_cloud.rest_client import GridRestClient, LightningClient, create_swagger_client
from requests import Session
from requests.adapters import HTTPAdapter
from requests.exceptions import ConnectionError, ConnectTimeout, ReadTimeout
from urllib3.util.retry import Retry
from lightning.app.core import constants
from lightning.app.utilities.app_helpers import Logger
def _response(r, *args: Any, **kwargs: Any):
    # requests response hook: surface HTTP error statuses immediately.
    # NOTE(review): `raise_for_status()` presumably returns None on success, so the hook's
    # return value is likely ignored by the session — confirm at the registration site.
    return r.raise_for_status()
155,587 | import contextlib
import functools
import json
import os
import re
import subprocess
import sys
from typing import Dict, Optional
import arrow
import click
import packaging
import requests
import rich
from lightning_cloud.openapi import Externalv1LightningappInstance
from lightning.app import __package_name__, __version__
from lightning.app.core.constants import APP_SERVER_PORT
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
def _is_url(id: Optional[str]) -> bool:
if isinstance(id, str) and (id.startswith("https://") or id.startswith("http://")):
return True
return False | null |
155,588 | import contextlib
import functools
import json
import os
import re
import subprocess
import sys
from typing import Dict, Optional
import arrow
import click
import packaging
import requests
import rich
from lightning_cloud.openapi import Externalv1LightningappInstance
from lightning.app import __package_name__, __version__
from lightning.app.core.constants import APP_SERVER_PORT
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
def _get_metadata_from_openapi(paths: Dict, path: str):
    # NOTE(review): the body of this function appears truncated in this snippet —
    # recover it from the original module before relying on this fragment.
def _extract_command_from_openapi(openapi_resp: Dict) -> Dict[str, Dict[str, str]]:
    # Collect the "/command/..." routes from an OpenAPI response, keyed by command name.
    command_paths = [p for p in openapi_resp["paths"] if p.startswith("/command/")]
    return {p.replace("/command/", ""): _get_metadata_from_openapi(openapi_resp["paths"], p) for p in command_paths}
155,589 | import contextlib
import functools
import json
import os
import re
import subprocess
import sys
from typing import Dict, Optional
import arrow
import click
import packaging
import requests
import rich
from lightning_cloud.openapi import Externalv1LightningappInstance
from lightning.app import __package_name__, __version__
from lightning.app.core.constants import APP_SERVER_PORT
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
def _arrow_time_callback(
    _ctx: "click.core.Context", _param: "click.core.Option", value: str, arw_now: Optional[arrow.Arrow] = None
) -> arrow.Arrow:
    """Parse a CLI time ``value`` into an :class:`arrow.Arrow`.

    Accepts either a human-relative phrase (e.g. ``"2 hours ago"``) or any format
    understood by :func:`arrow.get`.

    Args:
        _ctx: The click context (unused).
        _param: The click option being parsed (unused).
        value: The raw time string supplied on the command line.
        arw_now: Reference "now" used to dehumanize relative phrases. Defaults to the
            current UTC time *at call time*. (Previously the default was evaluated once
            at import time, freezing "now" for the lifetime of the process.)

    Raises:
        click.ClickException: If ``value`` cannot be parsed either way.
    """
    if arw_now is None:
        arw_now = arrow.utcnow()
    try:
        return arw_now.dehumanize(value)
    except ValueError:
        try:
            return arrow.get(value)
        except (ValueError, TypeError):
            # `from None` hides the internal parse errors from the user-facing traceback.
            raise click.ClickException(f"cannot parse time {value}") from None
155,590 | import contextlib
import functools
import json
import os
import re
import subprocess
import sys
from typing import Dict, Optional
import arrow
import click
import packaging
import requests
import rich
from lightning_cloud.openapi import Externalv1LightningappInstance
from lightning.app import __package_name__, __version__
from lightning.app.core.constants import APP_SERVER_PORT
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
logger = Logger(__name__)
def _get_newer_version() -> Optional[str]:
    """Check PyPI for newer versions of ``lightning``, returning the newest version if different from the current or
    ``None`` otherwise."""
    if packaging.version.parse(__version__).is_prerelease:
        # Pre-release installs never prompt for an upgrade.
        return None
    try:
        response = requests.get(f"https://pypi.org/pypi/{__package_name__}/json")
        response_json = response.json()
        releases = response_json["releases"]
        if __version__ not in releases:
            # Always return None if not installed from PyPI (e.g. dev versions)
            return None
        latest_version = response_json["info"]["version"]
        parsed_version = packaging.version.parse(latest_version)
        # Skip yanked releases as well as dev/pre-release builds published as "latest".
        is_invalid = response_json["info"]["yanked"] or parsed_version.is_devrelease or parsed_version.is_prerelease
        return None if __version__ == latest_version or is_invalid else latest_version
    except Exception:
        # Best-effort check: network/JSON failures must never break the CLI.
        # Return None if any exception occurs
        return None
def _redirect_command(executable: str):
    """Re-run the current lightning CLI call under ``executable``, then exit this process."""
    # Forward the original CLI arguments (sans program name) to the target interpreter.
    command = [executable, "-m", "lightning", *sys.argv[1:]]
    subprocess.run(command, env=os.environ)
    sys.exit()
__package_name__ = "lightning.app".split(".")[0]
# NOTE(review): the block below reads `_PACKAGE_ROOT` / `_PROJECT_ROOT` before any visible
# definition — this fragment appears truncated from its original module; verify before use.
if __package_name__ == "lightning":
    _PACKAGE_ROOT = os.path.dirname(_PACKAGE_ROOT)
    _PROJECT_ROOT = os.path.dirname(_PROJECT_ROOT)
The provided code snippet includes necessary dependencies for implementing the `_check_version_and_upgrade` function. Write a Python function `def _check_version_and_upgrade()` to solve the following problem:
Checks that the current version of ``lightning`` is the latest on PyPI. If not, prompt the user to upgrade ``lightning`` for them and re-run the current call in the new version.
Here is the function:
def _check_version_and_upgrade():
    """Checks that the current version of ``lightning`` is the latest on PyPI.

    If not, prompt the user to upgrade ``lightning`` for them and re-run the current call in the new version.
    """
    new_version = _get_newer_version()
    if new_version:
        prompt = f"A newer version of {__package_name__} is available ({new_version}). Would you like to upgrade?"
        if click.confirm(prompt, default=True):
            command = f"pip install {__package_name__}=={new_version}"
            logger.info(f"⚡ RUN: {command}")
            # Upgrade
            subprocess.run(
                [sys.executable, "-m"] + command.split(" "),
                check=True,
            )
            # Re-launch (hands the original CLI call to the freshly-upgraded install and exits)
            _redirect_command(sys.executable)
    return
155,591 | import contextlib
import functools
import json
import os
import re
import subprocess
import sys
from typing import Dict, Optional
import arrow
import click
import packaging
import requests
import rich
from lightning_cloud.openapi import Externalv1LightningappInstance
from lightning.app import __package_name__, __version__
from lightning.app.core.constants import APP_SERVER_PORT
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
logger = Logger(__name__)
def _redirect_command(executable: str):
    """Redirect the current lightning CLI call to the given executable."""
    # Forward the original CLI arguments (sans program name) to the target interpreter.
    subprocess.run(
        [executable, "-m", "lightning"] + sys.argv[1:],
        env=os.environ,
    )
    # Never return control to the (wrong-environment) caller.
    sys.exit()
__package_name__ = "lightning.app".split(".")[0]
# NOTE(review): `_PACKAGE_ROOT` / `_PROJECT_ROOT` are read before any visible definition —
# this fragment appears truncated from its original module; verify before relying on it.
if __package_name__ == "lightning":
    _PACKAGE_ROOT = os.path.dirname(_PACKAGE_ROOT)
    _PROJECT_ROOT = os.path.dirname(_PROJECT_ROOT)
The provided code snippet includes necessary dependencies for implementing the `_check_environment_and_redirect` function. Write a Python function `def _check_environment_and_redirect()` to solve the following problem:
Checks that the current ``sys.executable`` is the same as the executable resolved from the current environment. If not, this utility tries to redirect the ``lightning`` call to the environment executable (prompting the user to install lightning for them there if needed).
Here is the function:
def _check_environment_and_redirect():
    """Checks that the current ``sys.executable`` is the same as the executable resolved from the current environment.

    If not, this utility tries to redirect the ``lightning`` call to the environment executable (prompting the user to
    install lightning for them there if needed).
    """
    # Ask the environment's `python` (resolved via PATH) where it actually lives.
    process = subprocess.run(
        ["python", "-c", "import sys; print(sys.executable)"],
        capture_output=True,
        env=os.environ,
        check=True,
    )
    env_executable = os.path.realpath(process.stdout.decode().strip())
    sys_executable = os.path.realpath(sys.executable)
    # on windows, the extension might be different, where one uses `.EXE` and the other `.exe`
    if env_executable.lower() != sys_executable.lower():
        logger.info(
            "Lightning is running from outside your current environment. Switching to your current environment."
        )
        # Probe whether lightning is importable under the environment's interpreter.
        process = subprocess.run(
            [env_executable, "-m", "lightning", "--version"],
            capture_output=True,
            text=True,
        )
        if "No module named lightning" in process.stderr:
            prompt = f"The {__package_name__} package is not installed. Would you like to install it? [Y/n (exit)]"
            if click.confirm(prompt, default=True, show_default=False):
                command = f"pip install {__package_name__}"
                logger.info(f"⚡ RUN: {command}")
                subprocess.run(
                    [env_executable, "-m"] + command.split(" "),
                    check=True,
                )
            else:
                sys.exit()
        # Hand the original CLI call over to the environment's interpreter (exits the process).
        _redirect_command(env_executable)
    return
155,592 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def target_fn():
    """Background loop for a Streamlit UI: rerun every active session when the app state changes.

    Connects to the Lightning app's state websocket and, on each received message, asks every
    Streamlit session to rerun so the UI reflects the new state (throttled to roughly once per
    second). Exits cleanly when the websocket is closed normally.
    """
    try:
        # streamlit >= 1.14.0
        from streamlit import runtime

        get_instance = runtime.get_instance
        exists = runtime.exists()
    except ImportError:
        # Older versions
        from streamlit.server.server import Server

        get_instance = Server.get_current
        exists = bool(Server._singleton)

    async def update_fn():
        runtime_instance = get_instance()
        # Snapshot the sessions once; new sessions opened later are not picked up here.
        sessions = list(runtime_instance._session_info_by_id.values())
        # `LIGHTNING_APP_STATE_URL` being set indicates a cloud deployment behind port 8080.
        url = (
            "localhost:8080"
            if "LIGHTNING_APP_STATE_URL" in os.environ
            else f"localhost:{lightning.app.core.constants.APP_SERVER_PORT}"
        )
        ws_url = f"ws://{url}/api/v1/ws"
        last_updated = time.time()
        async with websockets.connect(ws_url) as websocket:
            while True:
                try:
                    # Block until the app broadcasts a state change (payload itself is unused).
                    _ = await websocket.recv()

                    # Throttle: rerun sessions at most about once per second.
                    while (time.time() - last_updated) < 1:
                        time.sleep(0.1)

                    for session in sessions:
                        session = session.session
                        session.request_rerun(session._client_state)
                    last_updated = time.time()
                except websockets.exceptions.ConnectionClosedOK:
                    # The websocket is not enabled
                    break

    if exists:
        asyncio.run(update_fn())
155,593 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
class LightningJSONEncoder(json.JSONEncoder):
    """JSON encoder that defers to an object's ``__json__()`` method when one exists."""

    def default(self, obj: Any) -> Any:
        json_hook = getattr(obj, "__json__", None)
        if callable(json_hook):
            return json_hook()
        # Fall back to the stock encoder, which raises TypeError for unsupported types.
        return super().default(obj)
The provided code snippet includes necessary dependencies for implementing the `_is_json_serializable` function. Write a Python function `def _is_json_serializable(x: Any) -> bool` to solve the following problem:
Test whether a variable can be encoded as json.
Here is the function:
def _is_json_serializable(x: Any) -> bool:
    """Test whether a variable can be encoded as json."""
    # Fast path: primitive, non-container types never need a full encode round-trip.
    if type(x) in lightning.app.core.constants.SUPPORTED_PRIMITIVE_TYPES:
        return True
    try:
        json.dumps(x, cls=LightningJSONEncoder)
    except (TypeError, OverflowError):
        # OverflowError is raised if a number is too large to encode.
        return False
    return True
155,594 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
class LightningFlow:
_INTERNAL_STATE_VARS = {
# Internal protected variables that are still part of the state (even though they are prefixed with "_")
"_paths",
"_layout",
}
def __init__(self) -> None:
"""The LightningFlow is used by the :class:`~lightning.app.core.app.LightningApp` to coordinate and manage
long- running jobs contained, the :class:`~lightning.app.core.work.LightningWork`.
A LightningFlow is characterized by:
* A set of state variables.
* Long-running jobs (:class:`~lightning.app.core.work.LightningWork`).
* Its children ``LightningFlow`` or ``LightningWork`` with their state variables.
**State variables**
The LightningFlow are special classes whose attributes require to be
json-serializable (e.g., int, float, bool, list, dict, ...).
They also may not reach into global variables unless they are constant.
The attributes need to be all defined in `__init__` method,
and eventually assigned to different values throughout the lifetime of the object.
However, defining new attributes outside of `__init__` is not allowed.
Attributes taken together represent the state of the component.
Components are capable of retrieving their state and that of their
children recursively at any time. They are also capable of setting
an externally provided state recursively to its children.
**Execution model and work**
The entry point for execution is the ``run`` method at the root component.
The ``run`` method of the root component may call the ``run`` method of its children, and the children
may call the ``run`` methods of their children and so on.
The ``run`` method of the root component is called repeatedly in a while loop forever until the app gets
terminated. In this programming model (reminiscent of React, Vue or Streamlit from the JavaScript world),
the values of the state variables, or their changes, are translated into actions throughout the component
hierarchy. This means the flow of execution will only be affected by state changes in a component or one of
its children, and otherwise remain idempotent.
The actions themselves are self-contained within :class:`~lightning.app.core.work.LightningWork`.
The :class:`~lightning.app.core.work.LightningWork` are typically used for long-running jobs,
like downloading a dataset, performing a query, starting a computationally heavy script.
While one may access any state variable in a LightningWork from a LightningFlow, one may not
directly call methods of other components from within a LightningWork as LightningWork can't have any children.
This limitation allows applications to be distributed at scale.
**Component hierarchy and App**
Given the above characteristics, a root LightningFlow, potentially containing
children components, can be passed to an App object and its execution
can be distributed (each LightningWork will be run within its own process
or different arrangements).
Example:
>>> from lightning.app import LightningFlow
>>> class RootFlow(LightningFlow):
... def __init__(self):
... super().__init__()
... self.counter = 0
... def run(self):
... self.counter += 1
...
>>> flow = RootFlow()
>>> flow.run()
>>> assert flow.counter == 1
>>> assert flow.state["vars"]["counter"] == 1
"""
self._state: set = set()
self._name: str = ""
self._flows: set = set()
self._works: set = set()
self._structures: set = set()
self._calls: dict = {}
self._changes: dict = {}
self._layout: Union[List[Dict], Dict] = {}
self._paths: dict = {}
self._backend: Optional["Backend"] = None
# tuple instead of a list so that it cannot be modified without using the setter
self._lightningignore: Tuple[str, ...] = ()
def name(self) -> str:
"""Return the current LightningFlow name."""
return self._name or "root"
def __setattr__(self, name: str, value: Any) -> None:
attr = getattr(self.__class__, name, None)
if isinstance(attr, property) and attr.fset is not None:
return attr.fset(self, value)
from lightning.app.structures import Dict as ComponentDict
from lightning.app.structures import List as ComponentList
if (
not _is_init_context(self)
and name not in self._state
and name not in self._paths
and (
not isinstance(value, (LightningWork, LightningFlow))
or (isinstance(value, (LightningWork, LightningFlow)) and not _is_run_context(self))
)
and name not in self._works.union(self._flows)
and self._is_state_attribute(name)
):
raise AttributeError(f"Cannot set attributes that were not defined in __init__: {name}")
if isinstance(value, str) and value.startswith("lit://"):
value = Path(value)
if self._is_state_attribute(name):
if hasattr(self, name):
if name in self._flows and value != getattr(self, name):
raise AttributeError(f"Cannot set attributes as the flow can't be changed once defined: {name}")
if name in self._works and value != getattr(self, name):
raise AttributeError(f"Cannot set attributes as the work can't be changed once defined: {name}")
if isinstance(value, (list, dict)) and value:
_type = (LightningFlow, LightningWork, ComponentList, ComponentDict)
if isinstance(value, list) and all(isinstance(va, _type) for va in value):
value = ComponentList(*value)
if isinstance(value, dict) and all(isinstance(va, _type) for va in value.values()):
value = ComponentDict(**value)
if isinstance(value, LightningFlow):
self._flows.add(name)
_set_child_name(self, value, name)
if name in self._state:
self._state.remove(name)
# Attach the backend to the flow and its children work.
if self._backend:
LightningFlow._attach_backend(value, self._backend)
for work in value.works():
work._register_cloud_compute()
elif isinstance(value, LightningWork):
self._works.add(name)
_set_child_name(self, value, name)
if name in self._state:
self._state.remove(name)
if self._backend:
self._backend._wrap_run_method(_LightningAppRef().get_current(), value) # type: ignore[arg-type]
value._register_cloud_compute()
elif isinstance(value, (ComponentDict, ComponentList)):
self._structures.add(name)
_set_child_name(self, value, name)
_backend = getattr(self, "backend", None)
if _backend is not None:
value._backend = _backend
for flow in value.flows:
if _backend is not None:
LightningFlow._attach_backend(flow, _backend)
for work in value.works:
work._register_cloud_compute()
if _backend is not None:
_backend._wrap_run_method(_LightningAppRef().get_current(), work)
elif isinstance(value, Path):
# In the init context, the full name of the Flow and Work is not known, i.e., we can't serialize
# the path without losing the information of origin and consumer. Hence, we delay the serialization
# of the path object until the app is instantiated.
if not _is_init_context(self):
self._paths[name] = value.to_dict()
self._state.add(name)
elif isinstance(value, Drive):
value = deepcopy(value)
value.component_name = self.name
self._state.add(name)
elif isinstance(value, CloudCompute):
self._state.add(name)
elif _is_json_serializable(value):
self._state.add(name)
if not isinstance(value, Path) and hasattr(self, "_paths") and name in self._paths:
# The attribute changed type from Path to another
self._paths.pop(name)
else:
raise AttributeError(
f"Only JSON-serializable attributes are currently supported"
f" (str, int, float, bool, tuple, list, dict etc.) to be part of {self} state. "
f"Found the attribute {name} with {value} instead. \n"
"HINT: Private attributes defined as follows `self._x = y` won't be shared between components "
"and therefore don't need to be JSON-serializable."
)
super().__setattr__(name, value)
return None
def _attach_backend(flow: "LightningFlow", backend: "Backend") -> None:
"""Attach the backend to all flows and its children."""
flow._backend = backend
for name in flow._structures:
getattr(flow, name)._backend = backend
for child_flow in flow.flows.values():
child_flow._backend = backend
for name in child_flow._structures:
getattr(child_flow, name)._backend = backend
app = _LightningAppRef().get_current()
for child_work in flow.works():
child_work._backend = backend
backend._wrap_run_method(app, child_work) # type: ignore[arg-type]
def __getattr__(self, item: str) -> Any:
if item in self.__dict__.get("_paths", {}):
return Path.from_dict(self._paths[item])
return self.__getattribute__(item)
def ready(self) -> bool:
"""Override to customize when your App should be ready."""
flows = self.flows
return all(flow.ready for flow in flows.values()) if flows else True
def changes(self) -> dict:
    """Return a shallow copy of the buffered change dict so callers cannot mutate it."""
    snapshot = self._changes.copy()
    return snapshot
def state(self) -> dict:
    """Returns the current flow state along its children."""
    return {
        "vars": _sanitize_state({attr: getattr(self, attr) for attr in self._state}),
        # NOTE: call results ("ret") may not be picklable; handled downstream.
        "calls": self._calls.copy(),
        "flows": {attr: getattr(self, attr).state for attr in self._flows},
        "works": {attr: getattr(self, attr).state for attr in self._works},
        "structures": {attr: getattr(self, attr).state for attr in self._structures},
        "changes": {},
    }
def state_vars(self) -> dict:
    """Return only the state variables of this flow and its children (no calls/changes)."""
    return {
        "vars": _sanitize_state({attr: getattr(self, attr) for attr in self._state}),
        "flows": {attr: getattr(self, attr).state_vars for attr in self._flows},
        "works": {attr: getattr(self, attr).state_vars for attr in self._works},
        "structures": {attr: getattr(self, attr).state_vars for attr in self._structures},
    }
def state_with_changes(self) -> dict:
    """Return the full recursive state, including the buffered changes of this flow."""
    return {
        "vars": _sanitize_state({attr: getattr(self, attr) for attr in self._state}),
        # NOTE: call results ("ret") may not be picklable; handled downstream.
        "calls": self._calls.copy(),
        "flows": {attr: getattr(self, attr).state_with_changes for attr in self._flows},
        "works": {attr: getattr(self, attr).state_with_changes for attr in self._works},
        "structures": {attr: getattr(self, attr).state_with_changes for attr in self._structures},
        "changes": self.changes,
    }
def flows(self) -> Dict[str, "LightningFlow"]:
    """Return all descendant LightningFlow instances, keyed by their full dotted name."""
    collected: Dict[str, "LightningFlow"] = {}
    for attr in sorted(self._flows):
        child = getattr(self, attr)
        collected[child.name] = child
        # Children contribute their own descendants recursively.
        collected.update(child.flows)
    for attr in sorted(self._structures):
        collected.update(getattr(self, attr).flows)
    return collected
def lightningignore(self) -> Tuple[str, ...]:
    """Programmatic equivalent of the ``.lightningignore`` file.

    Returns the tuple of ignore patterns currently configured (immutable by design).
    """
    return self._lightningignore
def lightningignore(self, lightningignore: Tuple[str, ...]) -> None:
    """Replace the ignore patterns; only permitted before the app is dispatched."""
    if self._backend is None:
        self._lightningignore = lightningignore
        return
    raise RuntimeError(
        f"Your app has been already dispatched, so modifying the `{self.name}.lightningignore` does not have an"
        " effect"
    )
def works(self, recurse: bool = True) -> List["LightningWork"]:
    """Return its :class:`~lightning.app.core.work.LightningWork`.

    Arguments:
        recurse: Whether to also collect works owned by child flows and structures.

    Returns:
        The works in deterministic (sorted attribute name) order.
    """
    works = [getattr(self, el) for el in sorted(self._works)]
    if not recurse:
        return works
    for child_name in sorted(self._flows):
        # Idiom fix: extend with the child's list instead of appending one element at a time.
        works.extend(getattr(self, child_name).works(recurse=recurse))
    for struct_name in sorted(self._structures):
        # Structures expose ``works`` as a property, not a method.
        works.extend(getattr(self, struct_name).works)
    return works
def named_works(self, recurse: bool = True) -> List[Tuple[str, LightningWork]]:
    """Return its :class:`~lightning.app.core.work.LightningWork` with their names."""
    collected = self.works(recurse=recurse)
    return [(work.name, work) for work in collected]
def set_state(self, provided_state: Dict, recurse: bool = True) -> None:
    """Method to set the state to this LightningFlow, its children and
    :class:`~lightning.app.core.work.LightningWork`.

    Arguments:
        provided_state: The state to be reloaded
        recurse: Whether to apply the state down children.
    """
    for k, v in provided_state["vars"].items():
        # Dict-encoded payloads may describe a Drive or a CloudCompute; rebuild them before assignment.
        if isinstance(v, Dict):
            v = _maybe_create_drive(self.name, v)
        if isinstance(v, Dict):
            v = _maybe_create_cloud_compute(v)
        # Goes through __setattr__, so normal validation/registration applies.
        setattr(self, k, v)
    self._changes = provided_state["changes"]
    self._calls.update(provided_state["calls"])
    if not recurse:
        return
    # Recurse into children flows, works and structures, in that order.
    for child, state in provided_state["flows"].items():
        getattr(self, child).set_state(state)
    for work, state in provided_state["works"].items():
        getattr(self, work).set_state(state)
    for structure, state in provided_state["structures"].items():
        getattr(self, structure).set_state(state)
def stop(self, end_msg: str = "") -> None:
    """Method used to exit the application.

    Arguments:
        end_msg: Optional message printed to stdout before exiting.
    """
    if end_msg:
        print(end_msg)
    # NOTE(review): ExitAppException is presumably caught by the runtime loop to terminate — confirm.
    raise ExitAppException
def fail(self, end_msg: str = "") -> None:
    """Method used to exit and fail the application.

    Arguments:
        end_msg: Optional message printed to stdout before failing.
    """
    if end_msg:
        print(end_msg)
    # NOTE(review): LightningFlowException is presumably caught by the runtime to mark failure — confirm.
    raise LightningFlowException
def _exit(self, end_msg: str = "") -> None:
    """Used to exit the application.

    Private method.

    .. deprecated:: 1.9.0
        This function is deprecated and will be removed in 2.0.0. Use :meth:`stop` instead.
    """
    message = "This function is deprecated and will be removed in 2.0.0. Use `LightningFlow.stop` instead."
    warnings.warn(DeprecationWarning(message))
    # Delegate to the supported public API.
    return self.stop(end_msg=end_msg)
def _is_state_attribute(name: str) -> bool:
    """Every public attribute is part of the state by default and all protected (prefixed by '_') or private
    (prefixed by '__') attributes are not.

    Exceptions are listed in the `_INTERNAL_STATE_VARS` class variable.
    """
    # Allow-listed internals (e.g. "_paths", "_layout") count as state despite the underscore prefix.
    return name in LightningFlow._INTERNAL_STATE_VARS or not name.startswith("_")
def run(self, *args: Any, **kwargs: Any) -> None:
    """Override with your own logic."""
def schedule(
    self, cron_pattern: str, start_time: Optional[datetime] = None, user_key: Optional[str] = None
) -> bool:
    """Run a part of the flow logic on a timely manner.

    Returns ``True`` on the iterations where the schedule fires, ``False`` otherwise. Avoid running
    dynamic flows or works inside the scheduled branch; instantiate them there, run them outside.

    Arguments:
        cron_pattern: The cron pattern to provide. Learn more at https://crontab.guru/.
        start_time: The start time of the cron job.
        user_key: Optional key used to improve the caching mechanism.
    """
    if user_key:
        cache_key = user_key
    else:
        # Default key: the call site, so each distinct ``self.schedule(...)`` line is tracked independently.
        caller = cast(FrameType, inspect.currentframe()).f_back
        assert caller is not None
        cache_key = f"{cron_pattern}.{caller.f_code.co_filename}.{caller.f_lineno}"
    call_hash = f"{self.schedule.__name__}:{DeepHash(cache_key)[cache_key]}"
    scheduling = self._calls.setdefault("scheduling", {})
    entered = call_hash in scheduling
    # Human-friendly aliases for common cron expressions.
    shorthand = {
        "midnight": "@midnight",
        "hourly": "@hourly",
        "daily": "@daily",
        "weekly": "@weekly",
        "monthly": "@monthly",
        "yearly": "@yearly",
        "annually": "@annually",
    }
    cron_pattern = shorthand.get(cron_pattern, cron_pattern)
    if entered:
        # Subsequent iterations: fire only when the scheduler flipped the flag.
        return scheduling[call_hash]["running"]
    metadata = {
        "running": False,
        "cron_pattern": cron_pattern,
        "start_time": str((start_time or datetime.now()).isoformat()),
        "name": self.name,
    }
    scheduling[call_hash] = metadata
    app = _LightningAppRef().get_current()
    if app:
        app._register_schedule(call_hash, metadata)
    # First registration always fires once.
    return True
def _enable_schedule(self, call_hash: str) -> None:
    # Mark the schedule entry identified by ``call_hash`` as currently firing.
    self._calls["scheduling"][call_hash]["running"] = True
def _disable_running_schedules(self) -> None:
    """Clear the ``running`` flag on every registered schedule entry."""
    schedules = self._calls.get("scheduling")
    if not schedules:
        return
    for entry in schedules.values():
        entry["running"] = False
def configure_layout(self) -> Union[Dict[str, Any], List[Dict[str, Any]], Frontend]:
    """Configure the UI layout of this LightningFlow.

    You can either:

    1. Return a single :class:`~lightning.app.frontend.frontend.Frontend` object to serve a user
       interface for this Flow (e.g. ``StaticWebFrontend``, ``StreamlitFrontend``).
    2. Return a single dictionary to expose the UI of a child flow.
    3. Return a list of ``dict(name=..., content=...)`` entries to arrange the children of this
       flow in one or multiple tabs, where ``content`` is a child component or a URL string.

    Note:
        This hook gets called at the time of app creation and then again as part of the loop. If
        desired, the returned layout configuration can depend on the state. The only exception are
        the flows that return a :class:`~lightning.app.frontend.frontend.Frontend`. These need to
        be provided at the time of app creation in order for the runtime to start the server.
    """
    # Default: one tab per child flow, labelled with the child's full name.
    return [{"name": child_name, "content": child} for child_name, child in self.flows.items()]
def experimental_iterate(self, iterable: Iterable, run_once: bool = True, user_key: str = "") -> Generator:
    """This method should always be used with any kind of iterable to ensure its fault tolerant.

    If you want your iterable to always be consumed from scratch, you shouldn't use this method.

    Arguments:
        iterable: Iterable to iterate over. The iterable shouldn't have side effects or be random.
        run_once: Whether to run the entire iteration only once.
            Otherwise, it would restart from the beginning.
        user_key: Key to be used to track the caching mechanism.
    """
    if not isinstance(iterable, Iterable):
        raise TypeError(f"An iterable should be provided to `self.iterate` method. Found {iterable}")
    # TODO: Find a better way. Investigated using __reduce__, but state change invalidate the cache.
    if not user_key:
        # Default cache key: the caller's file and the first line of its code object.
        frame = cast(FrameType, inspect.currentframe()).f_back
        assert frame is not None
        cache_key = f"{frame.f_code.co_filename}.{frame.f_code.co_firstlineno}"
    else:
        cache_key = user_key
    call_hash = f"{self.experimental_iterate.__name__}:{DeepHash(cache_key)[cache_key]}"
    entered = call_hash in self._calls
    has_started = entered and self._calls[call_hash]["counter"] > 0
    has_finished = entered and self._calls[call_hash]["has_finished"]
    if has_finished:
        if not run_once:
            # Reset so the next pass re-consumes the iterable from scratch.
            self._calls[call_hash].update({"counter": 0, "has_finished": False})
        else:
            # Already consumed once and run_once=True: the generator yields nothing.
            return range(0)
    if not has_started:
        self._calls[call_hash] = {
            "name": self.experimental_iterate.__name__,
            "call_hash": call_hash,
            "counter": 0,
            "has_finished": False,
        }
    # Fault tolerance: skip the items already consumed before a restart.
    skip_counter = max(self._calls[call_hash]["counter"], 0)
    for counter, value in enumerate(iterable):
        if skip_counter:
            skip_counter -= 1
            continue
        # Persist progress before yielding, so a crash resumes at the right element.
        self._calls[call_hash].update({"counter": counter})
        yield value
    self._calls[call_hash].update({"has_finished": True})
def configure_commands(self) -> None:
    """Configure the commands of this LightningFlow.

    Returns a list of dictionaries mapping a command name to a flow method, e.g.
    ``return {"my_command_name": self.my_remote_method}``. Once the app is running
    (``lightning_app run app app.py``), commands are invoked through the CLI:
    ``lightning_app my_command_name --args name=my_own_name``.
    """
    raise NotImplementedError
def configure_api(self) -> None:
    """Configure the API routes of the LightningFlow.

    Returns a list of HttpMethod such as Post or Get, each binding a route to a flow
    method, e.g. ``return [Post("/v1/api/request", self.handler)]``. Once the app is
    running, the Swagger UI of the app is available under the ``/docs`` route.
    """
    raise NotImplementedError
def state_dict(self) -> dict:
    """Returns the current flow state but not its children."""
    flat = {
        "vars": _sanitize_state({attr: getattr(self, attr) for attr in self._state}),
        "calls": self._calls.copy(),
    }
    # Children slots are intentionally left empty: this is a shallow snapshot.
    flat.update({"changes": {}, "flows": {}, "works": {}, "structures": {}})
    return flat
def load_state_dict(
    self,
    flow_state: Dict[str, Any],
    children_states: Dict[str, Any],
    strict: bool = True,
) -> None:
    """Reloads the state of this flow and its children.

    Override this hook to re-instantiate dynamically-created children first, then call
    ``super().load_state_dict(flow_state, children_states, strict)`` so their states are
    restored recursively.

    Arguments:
        flow_state: The state of the current flow.
        children_states: The state of the dynamic children of this flow.
        strict: Whether to raise an exception if a dynamic
            children hasn't been re-created.
    """
    self.set_state(flow_state, recurse=False)
    # Direct children carry no "." in their relative name.
    top_level_states = {k: v for k, v in children_states.items() if "." not in k}
    for child_name, state in top_level_states.items():
        child = getattr(self, child_name, None)
        if isinstance(child, LightningFlow):
            # Strip this child's prefix so grandchildren names become relative to it.
            descendant_states = {
                k.replace(child_name + ".", ""): v
                for k, v in children_states.items()
                if k.startswith(child_name) and k != child_name
            }
            child.load_state_dict(state, descendant_states, strict=strict)
        elif isinstance(child, LightningWork):
            child.set_state(state)
        elif strict:
            raise ValueError(f"The component {child_name} wasn't instantiated for the component {self.name}")
The provided code snippet includes the necessary dependencies for implementing the `_set_child_name` function. Write a Python function `def _set_child_name(component: "Component", child: "Component", new_name: str) -> str` to solve the following problem:
Compute and set the name of a child given the parent, and return the name.
Here is the function:
def _set_child_name(component: "Component", child: "Component", new_name: str) -> str:
    """Computes and sets the name of a child given the parent, and returns the name."""
    qualified = f"{component.name}.{new_name}"
    child._name = qualified
    # Renaming a node invalidates every cached name below it, so propagate recursively.
    if isinstance(child, lightning.app.core.LightningFlow):
        for attr in child._flows:
            _set_child_name(child, getattr(child, attr), attr)
        for attr in child._works:
            _set_child_name(child, getattr(child, attr), attr)
        for attr in child._structures:
            _set_child_name(child, getattr(child, attr), attr)
    if isinstance(child, lightning.app.structures.Dict):
        for key, entry in child.items():
            _set_child_name(child, entry, key)
    if isinstance(child, lightning.app.structures.List):
        for entry in child:
            # List elements keep the trailing segment of their existing name as the relative name.
            _set_child_name(child, entry, entry.name.split(".")[-1])
    return qualified
import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
import warnings
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from datetime import datetime
from types import FrameType
from typing import (
    TYPE_CHECKING,
    Any,
    Callable,
    Dict,
    Generator,
    Iterable,
    List,
    Mapping,
    Optional,
    Tuple,
    Type,
    Union,
    cast,
)
from unittest.mock import MagicMock

import websockets
from deepdiff import DeepHash, Delta

import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def _walk_to_component(
    root: "LightningFlow",
    component: "Component",
) -> Generator[Tuple["Component", "Component"], None, None]:
    """Returns a generator that runs through the tree starting from the root down to the given component.

    At each node, yields parent and child as a tuple.
    """
    from lightning.app.structures import Dict, List

    # Drop the leading 'root' segment: traversal starts below the root itself.
    segments = component.name.split(".")[1:]
    node = root
    for segment in segments:
        if isinstance(node, (Dict, List)):
            # Dict structures are keyed by name; List structures by integer index.
            nxt = node[segment] if isinstance(node, Dict) else node[int(segment)]
        else:
            nxt = getattr(node, segment)
        yield node, nxt
        node = nxt
class LightningFlow:
    """Coordinates long-running ``LightningWork`` jobs and child flows, exposing a JSON-serializable state."""

    # Internal protected variables that are still part of the state (even though they are prefixed with "_")
    _INTERNAL_STATE_VARS = {
        "_paths",
        "_layout",
    }
def __init__(self) -> None:
    """The LightningFlow is used by the :class:`~lightning.app.core.app.LightningApp` to coordinate and manage
    long-running jobs contained, the :class:`~lightning.app.core.work.LightningWork`.

    A LightningFlow is characterized by:

    * A set of JSON-serializable state variables, all defined in ``__init__`` and taken together
      representing the state of the component, retrievable and settable recursively.
    * Long-running jobs (:class:`~lightning.app.core.work.LightningWork`), which cannot have children.
    * Its children ``LightningFlow`` or ``LightningWork`` with their state variables.

    The entry point for execution is the ``run`` method of the root component, called repeatedly in a
    loop until the app terminates; state changes drive the flow of execution, which otherwise remains
    idempotent.
    """
    # Bookkeeping: names of state vars and of child components, grouped by kind.
    self._state: set = set()
    self._name: str = ""
    self._flows: set = set()
    self._works: set = set()
    self._structures: set = set()
    self._calls: dict = {}
    self._changes: dict = {}
    self._layout: Union[List[Dict], Dict] = {}
    self._paths: dict = {}
    self._backend: Optional["Backend"] = None
    # tuple instead of a list so that it cannot be modified without using the setter
    self._lightningignore: Tuple[str, ...] = ()
def name(self) -> str:
    """Return the current LightningFlow name."""
    if self._name:
        return self._name
    # The unnamed flow is, by convention, the root of the component tree.
    return "root"
def __setattr__(self, name: str, value: Any) -> None:
    """Validate and register attribute assignments so they become part of the flow state."""
    attr = getattr(self.__class__, name, None)
    # Respect ordinary property setters defined on the class.
    if isinstance(attr, property) and attr.fset is not None:
        return attr.fset(self, value)
    from lightning.app.structures import Dict as ComponentDict
    from lightning.app.structures import List as ComponentList
    # Outside __init__, only attributes already known to the flow may be (re)assigned,
    # except components attached dynamically from within ``run``.
    if (
        not _is_init_context(self)
        and name not in self._state
        and name not in self._paths
        and (
            not isinstance(value, (LightningWork, LightningFlow))
            or (isinstance(value, (LightningWork, LightningFlow)) and not _is_run_context(self))
        )
        and name not in self._works.union(self._flows)
        and self._is_state_attribute(name)
    ):
        raise AttributeError(f"Cannot set attributes that were not defined in __init__: {name}")
    # "lit://" strings are shorthand for shared Path objects.
    if isinstance(value, str) and value.startswith("lit://"):
        value = Path(value)
    if self._is_state_attribute(name):
        if hasattr(self, name):
            # Child components are immutable once attached.
            if name in self._flows and value != getattr(self, name):
                raise AttributeError(f"Cannot set attributes as the flow can't be changed once defined: {name}")
            if name in self._works and value != getattr(self, name):
                raise AttributeError(f"Cannot set attributes as the work can't be changed once defined: {name}")
        # Plain lists/dicts made exclusively of components are promoted to structures.
        if isinstance(value, (list, dict)) and value:
            _type = (LightningFlow, LightningWork, ComponentList, ComponentDict)
            if isinstance(value, list) and all(isinstance(va, _type) for va in value):
                value = ComponentList(*value)
            if isinstance(value, dict) and all(isinstance(va, _type) for va in value.values()):
                value = ComponentDict(**value)
        if isinstance(value, LightningFlow):
            self._flows.add(name)
            _set_child_name(self, value, name)
            if name in self._state:
                self._state.remove(name)
            # Attach the backend to the flow and its children work.
            if self._backend:
                LightningFlow._attach_backend(value, self._backend)
            # NOTE(review): indentation reconstructed from a flattened source — cloud-compute
            # registration assumed unconditional (matching the structures branch below); confirm.
            for work in value.works():
                work._register_cloud_compute()
        elif isinstance(value, LightningWork):
            self._works.add(name)
            _set_child_name(self, value, name)
            if name in self._state:
                self._state.remove(name)
            if self._backend:
                self._backend._wrap_run_method(_LightningAppRef().get_current(), value)  # type: ignore[arg-type]
            value._register_cloud_compute()
        elif isinstance(value, (ComponentDict, ComponentList)):
            self._structures.add(name)
            _set_child_name(self, value, name)
            _backend = getattr(self, "backend", None)
            if _backend is not None:
                value._backend = _backend
            for flow in value.flows:
                if _backend is not None:
                    LightningFlow._attach_backend(flow, _backend)
            for work in value.works:
                work._register_cloud_compute()
                if _backend is not None:
                    _backend._wrap_run_method(_LightningAppRef().get_current(), work)
        elif isinstance(value, Path):
            # In the init context, the full name of the Flow and Work is not known, i.e., we can't serialize
            # the path without losing the information of origin and consumer. Hence, we delay the serialization
            # of the path object until the app is instantiated.
            if not _is_init_context(self):
                self._paths[name] = value.to_dict()
                self._state.add(name)
        elif isinstance(value, Drive):
            # Copy so the stored Drive records this flow as its owner without mutating the caller's object.
            value = deepcopy(value)
            value.component_name = self.name
            self._state.add(name)
        elif isinstance(value, CloudCompute):
            self._state.add(name)
        elif _is_json_serializable(value):
            self._state.add(name)
            if not isinstance(value, Path) and hasattr(self, "_paths") and name in self._paths:
                # The attribute changed type from Path to another
                self._paths.pop(name)
        else:
            raise AttributeError(
                f"Only JSON-serializable attributes are currently supported"
                f" (str, int, float, bool, tuple, list, dict etc.) to be part of {self} state. "
                f"Found the attribute {name} with {value} instead. \n"
                "HINT: Private attributes defined as follows `self._x = y` won't be shared between components "
                "and therefore don't need to be JSON-serializable."
            )
    super().__setattr__(name, value)
    return None
def _attach_backend(flow: "LightningFlow", backend: "Backend") -> None:
    """Attach the backend to all flows and its children."""
    flow._backend = backend
    for name in flow._structures:
        getattr(flow, name)._backend = backend
    # Every descendant flow (and its structures) shares the same backend instance.
    for child_flow in flow.flows.values():
        child_flow._backend = backend
        for name in child_flow._structures:
            getattr(child_flow, name)._backend = backend
    # Works also get their ``run`` method wrapped so the backend can intercept calls.
    app = _LightningAppRef().get_current()
    for child_work in flow.works():
        child_work._backend = backend
        backend._wrap_run_method(app, child_work)  # type: ignore[arg-type]
def __getattr__(self, item: str) -> Any:
    """Fallback lookup: rehydrate lazily-serialized ``Path`` attributes from their dict form."""
    if item in self.__dict__.get("_paths", {}):
        return Path.from_dict(self._paths[item])
    return self.__getattribute__(item)
def ready(self) -> bool:
    """Override to customize when your App should be ready."""
    children = self.flows
    # Ready when there are no children, or when every child flow is ready.
    return not children or all(f.ready for f in children.values())
def changes(self) -> dict:
    """Return a shallow copy of ``_changes`` so callers cannot mutate the internal buffer."""
    return self._changes.copy()
def state(self) -> dict:
    """Returns the current flow state along its children."""
    # Recursively snapshot children flows and works.
    children_state = {child: getattr(self, child).state for child in self._flows}
    works_state = {work: getattr(self, work).state for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        # this may have the challenge that ret cannot be pickled, we'll need to handle this
        "calls": self._calls.copy(),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state for child in self._structures},
        "changes": {},
    }
def state_vars(self) -> dict:
    """Return only the state variables of this flow and its children (no calls/changes)."""
    children_state = {child: getattr(self, child).state_vars for child in self._flows}
    works_state = {work: getattr(self, work).state_vars for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state_vars for child in self._structures},
    }
def state_with_changes(self) -> dict:
    """Return the full recursive state, including this flow's buffered changes."""
    children_state = {child: getattr(self, child).state_with_changes for child in self._flows}
    works_state = {work: getattr(self, work).state_with_changes for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        # this may have the challenge that ret cannot be pickled, we'll need to handle this
        "calls": self._calls.copy(),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state_with_changes for child in self._structures},
        "changes": self.changes,
    }
def flows(self) -> Dict[str, "LightningFlow"]:
    """Return its children LightningFlow, keyed by full name, including nested descendants."""
    result: Dict[str, "LightningFlow"] = {}
    for attr_name in sorted(self._flows):
        direct = getattr(self, attr_name)
        result[direct.name] = direct
        # Fold in the child's own descendants.
        result.update(direct.flows)
    for attr_name in sorted(self._structures):
        result.update(getattr(self, attr_name).flows)
    return result
def lightningignore(self) -> Tuple[str, ...]:
    """Programmatic equivalent of the ``.lightningignore`` file.

    Returns the tuple of ignore patterns currently configured (immutable by design).
    """
    return self._lightningignore
def lightningignore(self, lightningignore: Tuple[str, ...]) -> None:
    """Replace the ignore patterns; forbidden once the app has been dispatched."""
    already_dispatched = self._backend is not None
    if already_dispatched:
        raise RuntimeError(
            f"Your app has been already dispatched, so modifying the `{self.name}.lightningignore` does not have an"
            " effect"
        )
    self._lightningignore = lightningignore
def works(self, recurse: bool = True) -> List["LightningWork"]:
    """Return its :class:`~lightning.app.core.work.LightningWork`.

    Arguments:
        recurse: Whether to also collect works owned by child flows and structures.

    Returns:
        The works in deterministic (sorted attribute name) order.
    """
    works = [getattr(self, el) for el in sorted(self._works)]
    if not recurse:
        return works
    for child_name in sorted(self._flows):
        # Idiom fix: extend with the child's list instead of appending one element at a time.
        works.extend(getattr(self, child_name).works(recurse=recurse))
    for struct_name in sorted(self._structures):
        # Structures expose ``works`` as a property, not a method.
        works.extend(getattr(self, struct_name).works)
    return works
def named_works(self, recurse: bool = True) -> List[Tuple[str, LightningWork]]:
    """Return its :class:`~lightning.app.core.work.LightningWork` with their names."""
    return [(child.name, child) for child in self.works(recurse=recurse)]
def set_state(self, provided_state: Dict, recurse: bool = True) -> None:
    """Apply ``provided_state`` to this flow and, optionally, to its children and works.

    Arguments:
        provided_state: The state to be reloaded
        recurse: Whether to apply the state down children.
    """
    for attr_name, attr_value in provided_state["vars"].items():
        # Dict payloads may encode a Drive or a CloudCompute; rebuild them before assignment.
        if isinstance(attr_value, Dict):
            attr_value = _maybe_create_drive(self.name, attr_value)
        if isinstance(attr_value, Dict):
            attr_value = _maybe_create_cloud_compute(attr_value)
        setattr(self, attr_name, attr_value)
    self._changes = provided_state["changes"]
    self._calls.update(provided_state["calls"])
    if not recurse:
        return
    # Recurse into flows, then works, then structures — preserving the original order.
    for group in ("flows", "works", "structures"):
        for child_name, child_state in provided_state[group].items():
            getattr(self, child_name).set_state(child_state)
def stop(self, end_msg: str = "") -> None:
    """Method used to exit the application.

    Arguments:
        end_msg: Optional message printed to stdout before exiting.
    """
    if end_msg:
        print(end_msg)
    # NOTE(review): ExitAppException is presumably caught by the runtime loop to terminate — confirm.
    raise ExitAppException
def fail(self, end_msg: str = "") -> None:
    """Method used to exit and fail the application.

    Arguments:
        end_msg: Optional message printed to stdout before failing.
    """
    if end_msg:
        print(end_msg)
    # NOTE(review): LightningFlowException is presumably caught by the runtime to mark failure — confirm.
    raise LightningFlowException
def _exit(self, end_msg: str = "") -> None:
    """Used to exit the application.

    Private method.

    .. deprecated:: 1.9.0
        This function is deprecated and will be removed in 2.0.0. Use :meth:`stop` instead.
    """
    warnings.warn(
        DeprecationWarning(
            "This function is deprecated and will be removed in 2.0.0. Use `LightningFlow.stop` instead."
        )
    )
    # Delegate to the supported public API.
    return self.stop(end_msg=end_msg)
def _is_state_attribute(name: str) -> bool:
    """Every public attribute is part of the state by default and all protected (prefixed by '_') or private
    (prefixed by '__') attributes are not.

    Exceptions are listed in the `_INTERNAL_STATE_VARS` class variable.
    """
    # Allow-listed internals (e.g. "_paths", "_layout") count as state despite the underscore prefix.
    return name in LightningFlow._INTERNAL_STATE_VARS or not name.startswith("_")
def run(self, *args: Any, **kwargs: Any) -> None:
    """Override with your own logic."""
def schedule(
    self, cron_pattern: str, start_time: Optional[datetime] = None, user_key: Optional[str] = None
) -> bool:
    """Run a part of the flow logic on a timely manner.

    Returns ``True`` on the iterations where the schedule fires, ``False`` otherwise. Avoid running
    dynamic flows or works inside the scheduled branch; instantiate them there, run them outside.

    Arguments:
        cron_pattern: The cron pattern to provide. Learn more at https://crontab.guru/.
        start_time: The start time of the cron job.
        user_key: Optional key used to improve the caching mechanism.
    """
    if user_key:
        cache_key = user_key
    else:
        # Default key: the call site, so each distinct ``self.schedule(...)`` line is tracked separately.
        caller_frame = cast(FrameType, inspect.currentframe()).f_back
        assert caller_frame is not None
        cache_key = f"{cron_pattern}.{caller_frame.f_code.co_filename}.{caller_frame.f_lineno}"
    call_hash = f"{self.schedule.__name__}:{DeepHash(cache_key)[cache_key]}"
    schedules = self._calls.setdefault("scheduling", {})
    entered = call_hash in schedules
    # Human-friendly aliases for common cron expressions.
    aliases = {
        "midnight": "@midnight",
        "hourly": "@hourly",
        "daily": "@daily",
        "weekly": "@weekly",
        "monthly": "@monthly",
        "yearly": "@yearly",
        "annually": "@annually",
    }
    cron_pattern = aliases.get(cron_pattern, cron_pattern)
    if entered:
        # Subsequent iterations fire only when the scheduler flipped the flag.
        return schedules[call_hash]["running"]
    entry = {
        "running": False,
        "cron_pattern": cron_pattern,
        "start_time": str((start_time or datetime.now()).isoformat()),
        "name": self.name,
    }
    schedules[call_hash] = entry
    app = _LightningAppRef().get_current()
    if app:
        app._register_schedule(call_hash, entry)
    # The first registration always fires once.
    return True
def _enable_schedule(self, call_hash: str) -> None:
self._calls["scheduling"][call_hash]["running"] = True
def _disable_running_schedules(self) -> None:
if "scheduling" not in self._calls:
return
for call_hash in self._calls["scheduling"]:
self._calls["scheduling"][call_hash]["running"] = False
def configure_layout(self) -> Union[Dict[str, Any], List[Dict[str, Any]], Frontend]:
    """Configure the UI layout of this LightningFlow.

    Override to return either:

    1. a single :class:`~lightning.app.frontend.frontend.Frontend` object serving a user
       interface for this Flow (e.g. ``StaticWebFrontend`` or ``StreamlitFrontend``),
    2. a single dictionary exposing the UI of a child flow, or
    3. a list of ``{"name": ..., "content": ...}`` dictionaries arranging the children of this
       flow (or external URLs) in one or multiple tabs.

    This hook gets called at app creation time and then again as part of the loop, so the
    returned layout configuration may depend on the state. The only exception are flows that
    return a ``Frontend``: those need to be provided at app creation in order for the runtime
    to start the server.

    If not overridden, every child flow is collected and displayed in its own tab.
    """
    tabs = []
    for child_name, child in self.flows.items():
        tabs.append({"name": child_name, "content": child})
    return tabs
def experimental_iterate(self, iterable: Iterable, run_once: bool = True, user_key: str = "") -> Generator:
    """This method should always be used with any kind of iterable to ensure its fault tolerant.

    If you want your iterable to always be consumed from scratch, you shouldn't use this method.

    Arguments:
        iterable: Iterable to iterate over. The iterable shouldn't have side effects or be random.
        run_once: Whether to run the entire iteration only once.
            Otherwise, it would restart from the beginning.
        user_key: Key to be used to track the caching mechanism.
    """
    if not isinstance(iterable, Iterable):
        raise TypeError(f"An iterable should be provided to `self.iterate` method. Found {iterable}")
    # TODO: Find a better way. Investigated using __reduce__, but state change invalidate the cache.
    if not user_key:
        # Default cache key is the call site (file + first line of the calling code object),
        # so each distinct loop in user code gets its own progress record.
        frame = cast(FrameType, inspect.currentframe()).f_back
        assert frame is not None
        cache_key = f"{frame.f_code.co_filename}.{frame.f_code.co_firstlineno}"
    else:
        cache_key = user_key
    call_hash = f"{self.experimental_iterate.__name__}:{DeepHash(cache_key)[cache_key]}"
    entered = call_hash in self._calls
    has_started = entered and self._calls[call_hash]["counter"] > 0
    has_finished = entered and self._calls[call_hash]["has_finished"]
    if has_finished:
        if not run_once:
            # Restart from scratch: reset the persisted progress.
            self._calls[call_hash].update({"counter": 0, "has_finished": False})
        else:
            # `return` inside a generator stops iteration immediately; range(0) is
            # only carried as the (unused) StopIteration value.
            return range(0)
    if not has_started:
        self._calls[call_hash] = {
            "name": self.experimental_iterate.__name__,
            "call_hash": call_hash,
            "counter": 0,
            "has_finished": False,
        }
    # On resume, fast-forward past the items consumed before the interruption.
    # NOTE(review): this skips `counter` items, which re-yields the item at index
    # `counter` (the one in flight when interrupted) — confirm that is intended.
    skip_counter = max(self._calls[call_hash]["counter"], 0)
    for counter, value in enumerate(iterable):
        if skip_counter:
            skip_counter -= 1
            continue
        # Persist progress before yielding so a crash mid-item can be resumed.
        self._calls[call_hash].update({"counter": counter})
        yield value
    self._calls[call_hash].update({"has_finished": True})
def configure_commands(self) -> None:
    """Configure the commands of this LightningFlow.

    Subclasses override this hook and return a dictionary mapping a command name to a flow
    method, e.g. ``{"my_command_name": self.my_remote_method}``. Once the app is running
    (``lightning_app run app app.py``), the command becomes available from the CLI as
    ``lightning_app my_command_name --args name=my_own_name``.
    """
    raise NotImplementedError
def configure_api(self) -> None:
    """Configure the API routes of the LightningFlow.

    Subclasses override this hook and return a list of HTTP route objects (such as
    ``lightning.app.api.Post`` or ``Get``) bound to flow methods. Once the app is running,
    the Swagger UI of the app is accessible under the ``/docs`` route.
    """
    raise NotImplementedError
def state_dict(self) -> dict:
    """Return a snapshot of this flow's own state; children slots are left empty."""
    # Only the attributes registered in `_state` belong to the serialized state.
    own_vars = {attr: getattr(self, attr) for attr in self._state}
    return {
        "vars": _sanitize_state(own_vars),
        "calls": self._calls.copy(),
        "changes": {},
        "flows": {},
        "works": {},
        "structures": {},
    }
def load_state_dict(
    self,
    flow_state: Dict[str, Any],
    children_states: Dict[str, Any],
    strict: bool = True,
) -> None:
    """Reloads the state of this flow and its children.

    .. code-block:: python

        class Work(LightningWork):
            def __init__(self):
                super().__init__()
                self.counter = 0

            def run(self):
                self.counter += 1


        class Flow(LightningFlow):
            def run(self):
                # dynamically create a work.
                if not getattr(self, "w", None):
                    self.w = WorkReload()

                self.w.run()

            def load_state_dict(self, flow_state, children_states, strict) -> None:
                # 1: Re-instantiate the dynamic work
                self.w = Work()

                # 2: Make any states modification / migration.
                ...

                # 3: Call the parent ``load_state_dict`` to
                # recursively reload the states.
                super().load_state_dict(
                    flow_state,
                    children_states,
                    strict,
                )

    Arguments:
        flow_state: The state of the current flow.
        children_states: The state of the dynamic children of this flow.
        strict: Whether to raise an exception if a dynamic
            children hasn't been re-created.
    """
    # Restore only this flow's own variables; children are handled below.
    self.set_state(flow_state, recurse=False)
    # Keys without a "." are immediate children; deeper descendants keep their
    # dotted prefix and are forwarded to the matching child recursively.
    direct_children_states = {k: v for k, v in children_states.items() if "." not in k}
    for child_name, state in direct_children_states.items():
        child = getattr(self, child_name, None)
        if isinstance(child, LightningFlow):
            # Strip this child's prefix so grandchildren states become relative to it.
            lower_children_states = {
                k.replace(child_name + ".", ""): v
                for k, v in children_states.items()
                if k.startswith(child_name) and k != child_name
            }
            child.load_state_dict(state, lower_children_states, strict=strict)
        elif isinstance(child, LightningWork):
            child.set_state(state)
        elif strict:
            # A dynamic child wasn't re-created before calling this method.
            raise ValueError(f"The component {child_name} wasn't instantiated for the component {self.name}")
def _delta_to_app_state_delta(root: "LightningFlow", component: "Component", delta: Delta) -> Delta:
    """Rewrite a component-local state ``Delta`` so its keys address the app's root state tree."""
    delta_dict = delta.to_dict()
    for changed in delta_dict.values():
        # Iterate over a copy: keys are inserted/deleted in `changed` while looping.
        for delta_key in changed.copy():
            val = changed[delta_key]

            # Rebuild the full path from the app root down to `component`, inserting the
            # proper section name ('works'/'flows'/'structures') at every hop.
            new_prefix = "root"
            for p, c in _walk_to_component(root, component):
                if isinstance(c, lightning.app.core.LightningWork):
                    new_prefix += "['works']"

                if isinstance(c, lightning.app.core.LightningFlow):
                    new_prefix += "['flows']"

                if isinstance(c, (lightning.app.structures.Dict, lightning.app.structures.List)):
                    new_prefix += "['structures']"

                # Only the last segment of the dotted name identifies the child slot.
                c_n = c.name.split(".")[-1]
                new_prefix += f"['{c_n}']"

            delta_key_without_root = delta_key[4:]  # the first 4 chars are the word 'root', strip it
            new_key = new_prefix + delta_key_without_root
            if new_key != delta_key:
                # Re-key in place: add the rewritten entry, drop the local one.
                changed[new_key] = val
                del changed[delta_key]

    return Delta(delta_dict)
155,596 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
The provided code snippet includes necessary dependencies for implementing the `_collect_child_process_pids` function. Write a Python function `def _collect_child_process_pids(pid: int) -> List[int]` to solve the following problem:
Return the list of child-process PIDs of a given process.
Here is the function:
def _collect_child_process_pids(pid: int) -> List[int]:
"""Function to return the list of child process pid's of a process."""
processes = os.popen("ps -ej | grep -i 'python' | grep -v 'grep' | awk '{ print $2,$3 }'").read()
processes = [p.split(" ") for p in processes.split("\n")[:-1]]
return [int(child) for child, parent in processes if parent == str(pid) and child != str(pid)] | Function to return the list of child process pid's of a process. |
155,597 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def _print_to_logger_info(*args: Any, **kwargs: Any):
    # TODO Find a better way to re-direct print to loggers.
    message = " ".join(str(v) for v in args)
    lightning.app._logger.info(message)
The provided code snippet includes necessary dependencies for implementing the `convert_print_to_logger_info` function. Write a Python function `def convert_print_to_logger_info(func: Callable) -> Callable` to solve the following problem:
This function is used to transform any `print` call into a `logger.info` call, so the output gets tracked in the cloud.
Here is the function:
def convert_print_to_logger_info(func: Callable) -> Callable:
    """This function is used to transform any print into logger.info calls, so it gets tracked in the cloud.

    Arguments:
        func: The callable whose ``print`` output should be redirected to the logger.

    Returns:
        A wrapper with the same signature that temporarily replaces the ``print`` builtin
        while ``func`` runs and always restores it afterwards.
    """

    @functools.wraps(func)
    def wrapper(*args: Any, **kwargs: Any) -> Any:
        # Patch via the `builtins` module: unlike the `__builtins__` alias (a module in
        # `__main__` but a dict in imported modules), this works consistently everywhere.
        original_print = builtins.print
        builtins.print = _print_to_logger_info
        try:
            return func(*args, **kwargs)
        finally:
            # Restore even if `func` raises, so a failure can't leave `print` patched.
            builtins.print = original_print

    return wrapper
155,598 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
The provided code snippet includes necessary dependencies for implementing the `pretty_state` function. Write a Python function `def pretty_state(state: Dict) -> Dict` to solve the following problem:
Utility to prettify the state by removing hidden attributes.
Here is the function:
def pretty_state(state: Dict) -> Dict:
    """Utility to prettify the state by removing hidden attributes."""
    pretty: Dict = {}
    # Keep only the public variables; the "vars" key is omitted entirely when none remain.
    public_vars = {k: v for k, v in state["vars"].items() if not k.startswith("_")}
    if public_vars:
        pretty["vars"] = public_vars
    # Recurse into children; a section key only appears when it has at least one child.
    for section in ("flows", "works"):
        if section in state:
            for child in state[section]:
                pretty.setdefault(section, {})[child] = pretty_state(state[section][child])
    return pretty
155,599 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def _state_dict(flow: "LightningFlow"):
state = {}
flows = [flow] + list(flow.flows.values())
for f in flows:
state[f.name] = f.state_dict()
for w in flow.works():
state[w.name] = w.state
return state | null |
155,600 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def affiliation(component: "Component") -> Tuple[str, ...]:
    """Returns the affiliation of a component."""
    full_name = component.name
    # The root component (or an unnamed one) has no affiliation.
    if full_name in ("root", ""):
        return ()
    # Drop the leading "root" segment; the rest is the affiliation path.
    return tuple(full_name.split(".")[1:])
The provided code snippet includes necessary dependencies for implementing the `_load_state_dict` function. Write a Python function `def _load_state_dict(root_flow: "LightningFlow", state: Dict[str, Any], strict: bool = True) -> None` to solve the following problem:
This function is used to reload the state assuming dynamic component creation. When a component isn't found but its state exists, its state is passed up to its closest existing parent. Arguments: root_flow: The flow at the top of the component tree. state: The collected state dict. strict: Whether to validate that all components have been re-created.
Here is the function:
def _load_state_dict(root_flow: "LightningFlow", state: Dict[str, Any], strict: bool = True) -> None:
    """This function is used to reload the state assuming dynamic components creation.

    When a component isn't found but its state exists, its state is passed up to its closest
    existing parent.

    Arguments:
        root_flow: The flow at the top of the component tree.
        state: The collected state dict.
        strict: Whether to validate all components have been re-created.
    """
    # 1: Reload the state of the existing works
    for w in root_flow.works():
        w.set_state(state.pop(w.name))

    # 2: Collect the existing flows
    flows = [root_flow] + list(root_flow.flows.values())
    flow_map = {f.name: f for f in flows}

    # 3: Find the state of the all dynamic components
    dynamic_components = {k: v for k, v in state.items() if k not in flow_map}

    # 4: Propagate the state of the dynamic components to their closest parents
    dynamic_children_state = {}
    for name, component_state in dynamic_components.items():
        # NOTE: this local `affiliation` (list of name segments) shadows the module-level
        # `affiliation()` helper.
        affiliation = name.split(".")
        for idx in range(0, len(affiliation)):
            # NOTE(review): when idx == 0, affiliation[:-0] is [] so parent_name is ""
            # and never matches any flow — the first iteration looks dead; confirm intended.
            parent_name = ".".join(affiliation[:-idx])
            has_matched = False
            for flow_name, flow in flow_map.items():
                if flow_name == parent_name:
                    if flow_name not in dynamic_children_state:
                        dynamic_children_state[flow_name] = {}

                    # Store the child's state keyed relative to its matched parent.
                    dynamic_children_state[flow_name].update({name.replace(parent_name + ".", ""): component_state})
                    has_matched = True
                    break
            if has_matched:
                break

    # 5: Reload the flow states
    for flow_name, flow in flow_map.items():
        flow.load_state_dict(state.pop(flow_name), dynamic_children_state.get(flow_name, {}), strict=strict)

    # 6: Verify all dynamic components has been re-created.
    if strict:
        components_names = (
            [root_flow.name] + [f.name for f in root_flow.flows.values()] + [w.name for w in root_flow.works()]
        )
        for component_name in dynamic_components:
            if component_name not in components_names:
                raise Exception(f"The component {component_name} was re-created during state reloading.")
155,601 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def is_static_method(klass_or_instance, attr) -> bool:
    """Check whether ``attr`` on ``klass_or_instance`` was declared as a ``staticmethod``."""
    # getattr_static avoids triggering the descriptor protocol, so the raw
    # staticmethod wrapper is still observable.
    raw_attribute = inspect.getattr_static(klass_or_instance, attr)
    return isinstance(raw_attribute, staticmethod)
155,602 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def _lightning_dispatched() -> bool:
return bool(int(os.getenv("LIGHTNING_DISPATCHED", 0)))
def _using_debugger() -> bool:
"""This method is used to detect whether the app is run with a debugger attached."""
if "LIGHTNING_DETECTED_DEBUGGER" in os.environ:
return True
# Collect the information about the process.
parent_process = os.popen(f"ps -ax | grep -i {os.getpid()} | grep -v grep").read()
# Detect whether VSCode or PyCharm debugger are used
use_debugger = "debugpy" in parent_process or "pydev" in parent_process
# Store the result to avoid multiple popen calls.
if use_debugger:
os.environ["LIGHTNING_DETECTED_DEBUGGER"] = "1"
return use_debugger
def _should_dispatch_app() -> bool:
    """Dispatch automatically only when running under a debugger and not already dispatched."""
    if _lightning_dispatched():
        return False
    if "LIGHTNING_APP_STATE_URL" in os.environ:
        return False
    # Kept last because it may shell out to `ps` when no cached result exists.
    return _using_debugger()
155,603 | import abc
import asyncio
import builtins
import enum
import functools
import inspect
import json
import logging
import os
import sys
import threading
import time
from abc import ABC, abstractmethod
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, List, Mapping, Optional, Tuple, Type
from unittest.mock import MagicMock
import websockets
from deepdiff import Delta
import lightning.app
from lightning.app.utilities.exceptions import LightningAppStateException
from lightning.app.utilities.tree import breadth_first
def breadth_first(root: "Component", types: Type["ComponentTuple"] = None):
    """Returns a generator that walks through the tree of components breadth-first.

    Arguments:
        root: The root component of the tree
        types: If provided, only the component types in this list will be visited.
    """
    for component in _BreadthFirstVisitor(root, types):
        yield component
class LightningFlow:
_INTERNAL_STATE_VARS = {
# Internal protected variables that are still part of the state (even though they are prefixed with "_")
"_paths",
"_layout",
}
def __init__(self) -> None:
"""The LightningFlow is used by the :class:`~lightning.app.core.app.LightningApp` to coordinate and manage
long- running jobs contained, the :class:`~lightning.app.core.work.LightningWork`.
A LightningFlow is characterized by:
* A set of state variables.
* Long-running jobs (:class:`~lightning.app.core.work.LightningWork`).
* Its children ``LightningFlow`` or ``LightningWork`` with their state variables.
**State variables**
The LightningFlow are special classes whose attributes require to be
json-serializable (e.g., int, float, bool, list, dict, ...).
They also may not reach into global variables unless they are constant.
The attributes need to be all defined in `__init__` method,
and eventually assigned to different values throughout the lifetime of the object.
However, defining new attributes outside of `__init__` is not allowed.
Attributes taken together represent the state of the component.
Components are capable of retrieving their state and that of their
children recursively at any time. They are also capable of setting
an externally provided state recursively to its children.
**Execution model and work**
The entry point for execution is the ``run`` method at the root component.
The ``run`` method of the root component may call the ``run`` method of its children, and the children
may call the ``run`` methods of their children and so on.
The ``run`` method of the root component is called repeatedly in a while loop forever until the app gets
terminated. In this programming model (reminiscent of React, Vue or Streamlit from the JavaScript world),
the values of the state variables, or their changes, are translated into actions throughout the component
hierarchy. This means the flow of execution will only be affected by state changes in a component or one of
its children, and otherwise remain idempotent.
The actions themselves are self-contained within :class:`~lightning.app.core.work.LightningWork`.
The :class:`~lightning.app.core.work.LightningWork` are typically used for long-running jobs,
like downloading a dataset, performing a query, starting a computationally heavy script.
While one may access any state variable in a LightningWork from a LightningFlow, one may not
directly call methods of other components from within a LightningWork as LightningWork can't have any children.
This limitation allows applications to be distributed at scale.
**Component hierarchy and App**
Given the above characteristics, a root LightningFlow, potentially containing
children components, can be passed to an App object and its execution
can be distributed (each LightningWork will be run within its own process
or different arrangements).
Example:
>>> from lightning.app import LightningFlow
>>> class RootFlow(LightningFlow):
... def __init__(self):
... super().__init__()
... self.counter = 0
... def run(self):
... self.counter += 1
...
>>> flow = RootFlow()
>>> flow.run()
>>> assert flow.counter == 1
>>> assert flow.state["vars"]["counter"] == 1
"""
self._state: set = set()
self._name: str = ""
self._flows: set = set()
self._works: set = set()
self._structures: set = set()
self._calls: dict = {}
self._changes: dict = {}
self._layout: Union[List[Dict], Dict] = {}
self._paths: dict = {}
self._backend: Optional["Backend"] = None
# tuple instead of a list so that it cannot be modified without using the setter
self._lightningignore: Tuple[str, ...] = ()
def name(self) -> str:
"""Return the current LightningFlow name."""
return self._name or "root"
def __setattr__(self, name: str, value: Any) -> None:
attr = getattr(self.__class__, name, None)
if isinstance(attr, property) and attr.fset is not None:
return attr.fset(self, value)
from lightning.app.structures import Dict as ComponentDict
from lightning.app.structures import List as ComponentList
if (
not _is_init_context(self)
and name not in self._state
and name not in self._paths
and (
not isinstance(value, (LightningWork, LightningFlow))
or (isinstance(value, (LightningWork, LightningFlow)) and not _is_run_context(self))
)
and name not in self._works.union(self._flows)
and self._is_state_attribute(name)
):
raise AttributeError(f"Cannot set attributes that were not defined in __init__: {name}")
if isinstance(value, str) and value.startswith("lit://"):
value = Path(value)
if self._is_state_attribute(name):
if hasattr(self, name):
if name in self._flows and value != getattr(self, name):
raise AttributeError(f"Cannot set attributes as the flow can't be changed once defined: {name}")
if name in self._works and value != getattr(self, name):
raise AttributeError(f"Cannot set attributes as the work can't be changed once defined: {name}")
if isinstance(value, (list, dict)) and value:
_type = (LightningFlow, LightningWork, ComponentList, ComponentDict)
if isinstance(value, list) and all(isinstance(va, _type) for va in value):
value = ComponentList(*value)
if isinstance(value, dict) and all(isinstance(va, _type) for va in value.values()):
value = ComponentDict(**value)
if isinstance(value, LightningFlow):
self._flows.add(name)
_set_child_name(self, value, name)
if name in self._state:
self._state.remove(name)
# Attach the backend to the flow and its children work.
if self._backend:
LightningFlow._attach_backend(value, self._backend)
for work in value.works():
work._register_cloud_compute()
elif isinstance(value, LightningWork):
self._works.add(name)
_set_child_name(self, value, name)
if name in self._state:
self._state.remove(name)
if self._backend:
self._backend._wrap_run_method(_LightningAppRef().get_current(), value) # type: ignore[arg-type]
value._register_cloud_compute()
elif isinstance(value, (ComponentDict, ComponentList)):
self._structures.add(name)
_set_child_name(self, value, name)
_backend = getattr(self, "backend", None)
if _backend is not None:
value._backend = _backend
for flow in value.flows:
if _backend is not None:
LightningFlow._attach_backend(flow, _backend)
for work in value.works:
work._register_cloud_compute()
if _backend is not None:
_backend._wrap_run_method(_LightningAppRef().get_current(), work)
elif isinstance(value, Path):
# In the init context, the full name of the Flow and Work is not known, i.e., we can't serialize
# the path without losing the information of origin and consumer. Hence, we delay the serialization
# of the path object until the app is instantiated.
if not _is_init_context(self):
self._paths[name] = value.to_dict()
self._state.add(name)
elif isinstance(value, Drive):
value = deepcopy(value)
value.component_name = self.name
self._state.add(name)
elif isinstance(value, CloudCompute):
self._state.add(name)
elif _is_json_serializable(value):
self._state.add(name)
if not isinstance(value, Path) and hasattr(self, "_paths") and name in self._paths:
# The attribute changed type from Path to another
self._paths.pop(name)
else:
raise AttributeError(
f"Only JSON-serializable attributes are currently supported"
f" (str, int, float, bool, tuple, list, dict etc.) to be part of {self} state. "
f"Found the attribute {name} with {value} instead. \n"
"HINT: Private attributes defined as follows `self._x = y` won't be shared between components "
"and therefore don't need to be JSON-serializable."
)
super().__setattr__(name, value)
return None
def _attach_backend(flow: "LightningFlow", backend: "Backend") -> None:
"""Attach the backend to all flows and its children."""
flow._backend = backend
for name in flow._structures:
getattr(flow, name)._backend = backend
for child_flow in flow.flows.values():
child_flow._backend = backend
for name in child_flow._structures:
getattr(child_flow, name)._backend = backend
app = _LightningAppRef().get_current()
for child_work in flow.works():
child_work._backend = backend
backend._wrap_run_method(app, child_work) # type: ignore[arg-type]
def __getattr__(self, item: str) -> Any:
if item in self.__dict__.get("_paths", {}):
return Path.from_dict(self._paths[item])
return self.__getattribute__(item)
def ready(self) -> bool:
"""Override to customize when your App should be ready."""
flows = self.flows
return all(flow.ready for flow in flows.values()) if flows else True
def changes(self) -> dict:
return self._changes.copy()
def state(self) -> dict:
"""Returns the current flow state along its children."""
children_state = {child: getattr(self, child).state for child in self._flows}
works_state = {work: getattr(self, work).state for work in self._works}
return {
"vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
# this may have the challenge that ret cannot be pickled, we'll need to handle this
"calls": self._calls.copy(),
"flows": children_state,
"works": works_state,
"structures": {child: getattr(self, child).state for child in self._structures},
"changes": {},
}
def state_vars(self) -> dict:
children_state = {child: getattr(self, child).state_vars for child in self._flows}
works_state = {work: getattr(self, work).state_vars for work in self._works}
return {
"vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
"flows": children_state,
"works": works_state,
"structures": {child: getattr(self, child).state_vars for child in self._structures},
}
def state_with_changes(self) -> dict:
    """Like the plain state snapshot but with the currently recorded ``changes`` included."""
    children_state = {child: getattr(self, child).state_with_changes for child in self._flows}
    works_state = {work: getattr(self, work).state_with_changes for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        # this may have the challenge that ret cannot be pickled, we'll need to handle this
        "calls": self._calls.copy(),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state_with_changes for child in self._structures},
        "changes": self.changes,
    }
def flows(self) -> Dict[str, "LightningFlow"]:
    """Return its children LightningFlow, recursively, keyed by their full name."""
    collected: Dict[str, "LightningFlow"] = {}
    for attr_name in sorted(self._flows):
        child = getattr(self, attr_name)
        collected[child.name] = child
        collected.update(child.flows)
    for struct_name in sorted(self._structures):
        collected.update(getattr(self, struct_name).flows)
    return collected
def lightningignore(self) -> Tuple[str, ...]:
    """Programmatic equivalent of the ``.lightningignore`` file."""
    # Set via the ``lightningignore`` setter; immutable once the app is dispatched.
    return self._lightningignore
def lightningignore(self, lightningignore: Tuple[str, ...]) -> None:
    """Setter: replace the ignore patterns; disallowed once the app has been dispatched."""
    # A non-None backend means the app was already dispatched; changing patterns would be ignored.
    if self._backend is not None:
        raise RuntimeError(
            f"Your app has been already dispatched, so modifying the `{self.name}.lightningignore` does not have an"
            " effect"
        )
    self._lightningignore = lightningignore
def works(self, recurse: bool = True) -> List[LightningWork]:
    """Return its :class:`~lightning.app.core.work.LightningWork`.

    With ``recurse=True`` (default), works of nested flows and structures are included.
    """
    collected = [getattr(self, attr) for attr in sorted(self._works)]
    if not recurse:
        return collected
    for flow_name in sorted(self._flows):
        collected.extend(getattr(self, flow_name).works(recurse=recurse))
    for struct_name in sorted(self._structures):
        collected.extend(getattr(self, struct_name).works)
    return collected
def named_works(self, recurse: bool = True) -> List[Tuple[str, LightningWork]]:
    """Return its :class:`~lightning.app.core.work.LightningWork` paired with their names."""
    pairs = []
    for work in self.works(recurse=recurse):
        pairs.append((work.name, work))
    return pairs
def set_state(self, provided_state: Dict, recurse: bool = True) -> None:
    """Method to set the state to this LightningFlow, its children and
    :class:`~lightning.app.core.work.LightningWork`.

    Arguments:
        provided_state: The state to be reloaded
        recurse: Whether to apply the state down children.
    """
    for k, v in provided_state["vars"].items():
        # Dict-encoded Drive objects are rehydrated first ...
        if isinstance(v, Dict):
            v = _maybe_create_drive(self.name, v)
        # ... and anything still a plain dict is tried as a CloudCompute payload.
        if isinstance(v, Dict):
            v = _maybe_create_cloud_compute(v)
        setattr(self, k, v)
    self._changes = provided_state["changes"]
    self._calls.update(provided_state["calls"])
    if not recurse:
        return
    # Recursively push state into child flows, works, and structures.
    for child, state in provided_state["flows"].items():
        getattr(self, child).set_state(state)
    for work, state in provided_state["works"].items():
        getattr(self, work).set_state(state)
    for structure, state in provided_state["structures"].items():
        getattr(self, structure).set_state(state)
def stop(self, end_msg: str = "") -> None:
    """Method used to exit the application.

    Arguments:
        end_msg: Optional message printed to stdout before exiting.

    Raises:
        ExitAppException: Always; propagated to the runtime to stop the app.
    """
    if end_msg:
        print(end_msg)
    raise ExitAppException
def fail(self, end_msg: str = "") -> None:
    """Method used to exit and fail the application.

    Arguments:
        end_msg: Optional message printed to stdout before failing.

    Raises:
        LightningFlowException: Always; propagated to the runtime to fail the app.
    """
    if end_msg:
        print(end_msg)
    raise LightningFlowException
def _exit(self, end_msg: str = "") -> None:
    """Used to exit the application.

    Private method.

    .. deprecated:: 1.9.0
        This function is deprecated and will be removed in 2.0.0. Use :meth:`stop` instead.
    """
    warnings.warn(
        DeprecationWarning(
            "This function is deprecated and will be removed in 2.0.0. Use `LightningFlow.stop` instead."
        )
    )
    # Delegate to the public replacement.
    return self.stop(end_msg=end_msg)
def _is_state_attribute(name: str) -> bool:
    """Every public attribute is part of the state by default and all protected (prefixed by '_') or private
    (prefixed by '__') attributes are not.

    Exceptions are listed in the `_INTERNAL_STATE_VARS` class variable.
    """
    if not name.startswith("_"):
        return True
    return name in LightningFlow._INTERNAL_STATE_VARS
def run(self, *args: Any, **kwargs: Any) -> None:
    """Entry point for the flow's logic — a no-op unless overridden by a subclass."""
    return None
def schedule(
    self, cron_pattern: str, start_time: Optional[datetime] = None, user_key: Optional[str] = None
) -> bool:
    """Run a part of the flow logic on a timely manner.

    Returns ``True`` whenever the schedule fires, so it is typically used as
    ``if self.schedule("hourly"): ...`` inside ``run()``. A best practice is to
    avoid running dynamic flows or works directly under the condition; instead,
    instantiate them within the condition and run them outside.

    Arguments:
        cron_pattern: Cron expression (see https://crontab.guru/) or one of the aliases
            ``midnight``, ``hourly``, ``daily``, ``weekly``, ``monthly``, ``yearly``, ``annually``.
        start_time: The start time of the cron job (defaults to now).
        user_key: Optional key used to improve the caching mechanism; by default the
            call site (filename and line number) is used.
    """
    if user_key:
        cache_key = user_key
    else:
        # Key the schedule on its call site so each ``self.schedule(...)`` line is tracked separately.
        frame = cast(FrameType, inspect.currentframe()).f_back
        assert frame is not None
        cache_key = f"{cron_pattern}.{frame.f_code.co_filename}.{frame.f_lineno}"
    call_hash = f"{self.schedule.__name__}:{DeepHash(cache_key)[cache_key]}"

    if "scheduling" not in self._calls:
        self._calls["scheduling"] = {}

    expr_aliases = {
        "midnight": "@midnight",
        "hourly": "@hourly",
        "daily": "@daily",
        "weekly": "@weekly",
        "monthly": "@monthly",
        "yearly": "@yearly",
        "annually": "@annually",
    }
    cron_pattern = expr_aliases.get(cron_pattern, cron_pattern)

    scheduling = self._calls["scheduling"]
    if call_hash in scheduling:
        # Already registered: report whether the schedule is currently firing.
        return scheduling[call_hash]["running"]

    # First call: register the schedule with the app and report True once.
    schedule_metadata = {
        "running": False,
        "cron_pattern": cron_pattern,
        "start_time": str((start_time or datetime.now()).isoformat()),
        "name": self.name,
    }
    scheduling[call_hash] = schedule_metadata
    app = _LightningAppRef().get_current()
    if app:
        app._register_schedule(call_hash, schedule_metadata)
    return True
def _enable_schedule(self, call_hash: str) -> None:
    """Mark the schedule identified by ``call_hash`` as currently firing."""
    self._calls["scheduling"][call_hash]["running"] = True
def _disable_running_schedules(self) -> None:
    """Reset the ``running`` flag on every registered schedule."""
    scheduling = self._calls.get("scheduling")
    if scheduling is None:
        return
    for entry in scheduling.values():
        entry["running"] = False
def configure_layout(self) -> Union[Dict[str, Any], List[Dict[str, Any]], Frontend]:
    """Configure the UI layout of this LightningFlow.

    Override to return either:

    1. A single :class:`~lightning.app.frontend.frontend.Frontend` object serving
       a user interface for this flow (e.g. ``StaticWebFrontend``, ``StreamlitFrontend``).
    2. A single dictionary to expose the UI of a child flow.
    3. A list of dictionaries (``{"name": ..., "content": ...}``) to arrange the
       children of this flow in one or multiple tabs; ``content`` may also be a URL string.

    The default implementation collects all child flows and displays each in its own tab.

    Note:
        This hook gets called at the time of app creation and then again as part of the
        loop, so the returned layout may depend on the state — except for flows that
        return a ``Frontend``, which must be provided at app creation time so the
        runtime can start the server.
    """
    tabs: List[Dict[str, Any]] = []
    for tab_name, component in self.flows.items():
        tabs.append({"name": tab_name, "content": component})
    return tabs
def experimental_iterate(self, iterable: Iterable, run_once: bool = True, user_key: str = "") -> Generator:
    """This method should always be used with any kind of iterable to ensure its fault tolerant.

    If you want your iterable to always be consumed from scratch, you shouldn't use this method.

    Arguments:
        iterable: Iterable to iterate over. The iterable shouldn't have side effects or be random.
        run_once: Whether to run the entire iteration only once.
            Otherwise, it would restart from the beginning.
        user_key: Key to be used to track the caching mechanism.
    """
    if not isinstance(iterable, Iterable):
        raise TypeError(f"An iterable should be provided to `self.iterate` method. Found {iterable}")

    # TODO: Find a better way. Investigated using __reduce__, but state change invalidate the cache.
    if not user_key:
        # Key the iteration on its call site (file + first line) so each loop is tracked separately.
        frame = cast(FrameType, inspect.currentframe()).f_back
        assert frame is not None
        cache_key = f"{frame.f_code.co_filename}.{frame.f_code.co_firstlineno}"
    else:
        cache_key = user_key

    call_hash = f"{self.experimental_iterate.__name__}:{DeepHash(cache_key)[cache_key]}"
    entered = call_hash in self._calls
    has_started = entered and self._calls[call_hash]["counter"] > 0
    has_finished = entered and self._calls[call_hash]["has_finished"]
    if has_finished:
        if not run_once:
            # Reset the bookkeeping so the iteration restarts from scratch.
            self._calls[call_hash].update({"counter": 0, "has_finished": False})
        else:
            # Already consumed and run_once: yield nothing (StopIteration value only).
            return range(0)

    if not has_started:
        self._calls[call_hash] = {
            "name": self.experimental_iterate.__name__,
            "call_hash": call_hash,
            "counter": 0,
            "has_finished": False,
        }

    # After a restart, skip the elements consumed before the interruption.
    skip_counter = max(self._calls[call_hash]["counter"], 0)
    for counter, value in enumerate(iterable):
        if skip_counter:
            skip_counter -= 1
            continue
        # Persist progress before yielding so a crash resumes at the right element.
        self._calls[call_hash].update({"counter": counter})
        yield value

    self._calls[call_hash].update({"has_finished": True})
def configure_commands(self) -> None:
    """Configure the commands of this LightningFlow.

    Override to return a list of dictionaries mapping a command name to a flow
    method, e.g. ``return {"my_command_name": self.my_remote_method}``. Once the
    app is running (``lightning_app run app app.py``), the command can be invoked
    from the CLI: ``lightning_app my_command_name --args name=my_own_name``.

    Raises:
        NotImplementedError: Always, unless overridden by a subclass.
    """
    raise NotImplementedError
def configure_api(self) -> None:
    """Configure the API routes of the LightningFlow.

    Override to return a list of HTTP route objects (e.g. ``Post``/``Get`` from
    ``lightning.app.api``) bound to flow methods, e.g.
    ``return [Post("/v1/api/request", self.handler)]``. Once the app is running,
    the Swagger UI is available under the ``/docs`` route.

    Raises:
        NotImplementedError: Always, unless overridden by a subclass.
    """
    raise NotImplementedError
def state_dict(self) -> dict:
    """Returns the current flow state but not its children."""
    snapshot: dict = {"vars": _sanitize_state({attr: getattr(self, attr) for attr in self._state})}
    snapshot["calls"] = self._calls.copy()
    # Children and pending changes are intentionally empty: this is a shallow snapshot.
    for empty_key in ("changes", "flows", "works", "structures"):
        snapshot[empty_key] = {}
    return snapshot
def load_state_dict(
    self,
    flow_state: Dict[str, Any],
    children_states: Dict[str, Any],
    strict: bool = True,
) -> None:
    """Reloads the state of this flow and its children.

    Dynamically created children (e.g. works instantiated inside ``run()``) must be
    re-created before their state can be reloaded. A typical override re-instantiates
    the dynamic components, applies any state migration, then calls
    ``super().load_state_dict(flow_state, children_states, strict)`` to recurse.

    Arguments:
        flow_state: The state of the current flow.
        children_states: The state of the dynamic children of this flow, keyed by
            dotted attribute path relative to this flow.
        strict: Whether to raise an exception if a dynamic child hasn't been re-created.
    """
    # Restore this flow's own variables first; children are handled explicitly below.
    self.set_state(flow_state, recurse=False)
    for child_name, child_state in children_states.items():
        if "." in child_name:
            # Nested descendants are forwarded to the direct child's recursive call below.
            continue
        child = getattr(self, child_name, None)
        if isinstance(child, LightningFlow):
            prefix = child_name + "."
            nested = {
                k.replace(prefix, ""): v
                for k, v in children_states.items()
                if k.startswith(child_name) and k != child_name
            }
            child.load_state_dict(child_state, nested, strict=strict)
        elif isinstance(child, LightningWork):
            child.set_state(child_state)
        elif strict:
            raise ValueError(f"The component {child_name} wasn't instantiated for the component {self.name}")
The provided code snippet includes necessary dependencies for implementing the `_is_headless` function. Write a Python function `def _is_headless(app: "LightningApp") -> bool` to solve the following problem:
Utility which returns True if the given App has no ``Frontend`` objects or URLs exposed through ``configure_layout``.
Here is the function:
def _is_headless(app: "LightningApp") -> bool:
    """Utility which returns True if the given App has no ``Frontend`` objects or URLs exposed through
    ``configure_layout``."""
    # Any registered frontend means the app exposes a UI.
    if app.frontends:
        return False
    # Otherwise scan every flow's resolved layout for an exposed URL ("target" entry).
    for component in breadth_first(app.root, types=(lightning.app.LightningFlow,)):
        for entry in component._layout:
            if "target" in entry:
                return False
    # NOTE(review): the trailing "| Utility ..." text on the next line is extraction
    # residue from the dataset row, not Python code — confirm against the original module.
    return True | Utility which returns True if the given App has no ``Frontend`` objects or URLs exposed through ``configure_layout``.
155,604 | import os
import pathlib
import queue
import signal
import sys
import threading
import time
import traceback
import warnings
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from functools import partial
from threading import Event, Thread
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Set, Tuple, Type, Union
from deepdiff import DeepDiff, Delta
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.core import constants
from lightning.app.core.queues import MultiProcessQueue
from lightning.app.storage.copier import _Copier, _copy_files
from lightning.app.storage.drive import Drive, _maybe_create_drive
from lightning.app.storage.path import Path, _path_to_work_artifact
from lightning.app.storage.payload import Payload
from lightning.app.utilities.app_helpers import affiliation
from lightning.app.utilities.component import _set_work_context
from lightning.app.utilities.enum import (
CacheCallsKeys,
WorkFailureReasons,
WorkStageStatus,
WorkStopReasons,
make_status,
)
from lightning.app.utilities.exceptions import CacheMissException, LightningSigtermStateException
from lightning.app.utilities.app_helpers import Logger
class ProxyWorkRun:
    """Proxy that replaces a ``LightningWork.run`` on the flow side.

    Calling the proxy does not execute the work; it enqueues the call (args, kwargs,
    call hash, and a trimmed work state) to the work's caller queue.

    NOTE(review): the annotated fields together with ``__post_init__`` suggest this
    class is decorated with ``@dataclass`` — the decorator appears stripped from this
    excerpt; confirm against the original module.
    """

    work_run: Callable  # the original, unproxied ``run`` method
    work_name: str  # TODO: remove this argument and get the name from work.name directly
    work: "LightningWork"
    caller_queue: "BaseQueue"  # queue consumed by the work's runner

    def __post_init__(self):
        # Populated lazily; holds the last dispatched work state.
        self.work_state = None

    def __call__(self, *args: Any, **kwargs: Any):
        self.has_sent = False
        self._validate_call_args(args, kwargs)
        args, kwargs = self._process_call_args(args, kwargs)
        # The call hash uniquely identifies this (method, arguments) combination.
        call_hash = self.work._call_hash(self.work_run, *self._convert_hashable(args, kwargs))
        entered = call_hash in self.work._calls
        returned = entered and "ret" in self.work._calls[call_hash]
        # TODO (tchaton): Handle spot instance retrieval differently from stopped work.
        stopped_on_sigterm = self.work._restarting and self.work.status.reason == WorkStopReasons.SIGTERM_SIGNAL_HANDLER
        data = {"args": args, "kwargs": kwargs, "call_hash": call_hash}
        # The if/else conditions are left un-compressed to simplify readability for the readers.
        if not entered or stopped_on_sigterm:
            _send_data_to_caller_queue(self, self.work, self.caller_queue, data, call_hash)
        else:
            if self.work.cache_calls and returned:
                # Result already cached for these arguments: nothing to dispatch.
                return
            if returned or stopped_on_sigterm:
                # the previous task has completed and we can re-queue the next one.
                # overriding the return value for next loop iteration.
                _send_data_to_caller_queue(self, self.work, self.caller_queue, data, call_hash)
        if not self.work.parallel:
            # Non-parallel works raise until a result is available; the flow loop retries.
            raise CacheMissException("Task never called before. Triggered now")
def _validate_call_args(self, args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> None:
    """Validate the call args before they get passed to the run method of the Work.

    Currently, this performs a check against strings that look like filesystem paths and may need to be wrapped with
    a Lightning Path by the user.

    Args:
        args: Positional arguments destined for the work's ``run()``.
        kwargs: Keyword arguments destined for the work's ``run()``.
    """

    def warn_if_pathlike(obj: Union[os.PathLike, str]):
        # Already a Lightning Path: nothing to warn about.
        if isinstance(obj, Path):
            return
        if os.sep in str(obj) and os.path.exists(obj):
            # NOTE: The existence check is wrong in general, as the file will never exist on the disk
            # where the flow is running unless we are running locally
            warnings.warn(
                # Fixed grammar of the user-facing warning ("passed a the value" -> "passed the value").
                f"You passed the value {obj!r} as an argument to the `run()` method of {self.work_name} and"
                f" it looks like this is a path to a file or a folder. Consider wrapping this path in a"
                f" `lightning.app.storage.Path` object to be able to access these files in your Work.",
                UserWarning,
            )

    apply_to_collection(args, dtype=(os.PathLike, str), function=warn_if_pathlike)
    apply_to_collection(kwargs, dtype=(os.PathLike, str), function=warn_if_pathlike)
def _process_call_args(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Tuple[Tuple[Any, ...], Dict[str, Any]]:
    """Processes all positional and keyword arguments before they get passed to the caller queue and sent to the
    LightningWork.

    Currently, this method only applies sanitization to Lightning Path objects.

    NOTE(review): the signature takes no ``self``/``cls`` — presumably a ``@staticmethod``
    whose decorator was dropped from this excerpt; confirm against the original module.

    Args:
        args: The tuple of positional arguments passed to the run method.
        kwargs: The dictionary of named arguments passed to the run method.

    Returns:
        The positional and keyword arguments in the same order they were passed in.
    """

    def sanitize(obj: Union[Path, Drive]) -> Union[Path, Dict]:
        if isinstance(obj, Path):
            # create a copy of the Path and erase the consumer
            # the LightningWork on the receiving end of the caller queue will become the new consumer
            # this is necessary to make the Path deepdiff-hashable
            path_copy = Path(obj)
            path_copy._sanitize()
            path_copy._consumer = None
            return path_copy
        # Drives are serialized to plain dicts for transport.
        return obj.to_dict()

    return apply_to_collection((args, kwargs), dtype=(Path, Drive), function=sanitize)
def _convert_hashable(args: Tuple[Any, ...], kwargs: Dict[str, Any]) -> Tuple[Tuple[Any, ...], Dict[str, Any]]:
    """Processes all positional and keyword arguments before they get passed to the caller queue and sent to the
    LightningWork.

    Currently, this method only applies sanitization to Hashable Objects.

    NOTE(review): like ``_process_call_args``, the missing ``self`` suggests a stripped
    ``@staticmethod`` decorator; confirm against the original module.

    Args:
        args: The tuple of positional arguments passed to the run method.
        kwargs: The dictionary of named arguments passed to the run method.

    Returns:
        The positional and keyword arguments in the same order they were passed in.
    """
    from lightning.app.utilities.types import Hashable

    def sanitize(obj: Hashable) -> Union[Path, Dict]:
        # Objects matching Lightning's ``Hashable`` type are serialized via their ``to_dict``.
        return obj.to_dict()

    return apply_to_collection((args, kwargs), dtype=Hashable, function=sanitize)
def unwrap(fn):
    """Return the innermost callable behind partials, ProxyWorkRun wrappers, and ``functools.wraps`` layers."""
    target = fn
    if isinstance(target, partial):
        target = target.keywords["work_run"]
    if isinstance(target, ProxyWorkRun):
        target = target.work_run
    while hasattr(target, "__wrapped__"):
        target = target.__wrapped__
    return target
155,605 | import os
import pathlib
import queue
import signal
import sys
import threading
import time
import traceback
import warnings
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from functools import partial
from threading import Event, Thread
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Set, Tuple, Type, Union
from deepdiff import DeepDiff, Delta
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.core import constants
from lightning.app.core.queues import MultiProcessQueue
from lightning.app.storage.copier import _Copier, _copy_files
from lightning.app.storage.drive import Drive, _maybe_create_drive
from lightning.app.storage.path import Path, _path_to_work_artifact
from lightning.app.storage.payload import Payload
from lightning.app.utilities.app_helpers import affiliation
from lightning.app.utilities.component import _set_work_context
from lightning.app.utilities.enum import (
CacheCallsKeys,
WorkFailureReasons,
WorkStageStatus,
WorkStopReasons,
make_status,
)
from lightning.app.utilities.exceptions import CacheMissException, LightningSigtermStateException
from lightning.app.utilities.app_helpers import Logger
# Module-level logger (Lightning's wrapper around the stdlib ``logging``).
logger = Logger(__name__)
class WorkStageStatus:
    """String constants for the lifecycle stages a LightningWork can be in."""

    NOT_STARTED = "not_started"
    STARTED = "started"
    STOPPED = "stopped"
    PENDING = "pending"
    RUNNING = "running"
    SUCCEEDED = "succeeded"
    FAILED = "failed"
def make_status(stage: str, message: Optional[str] = None, reason: Optional[str] = None):
    """Build a status dict for ``stage``, stamped with the current UTC timestamp.

    ``message`` and ``reason`` are included only when truthy.
    """
    status = {
        "stage": stage,
        "timestamp": datetime.now(tz=timezone.utc).timestamp(),
    }
    for key, value in (("message", message), ("reason", reason)):
        if value:
            status[key] = value
    return status
class CacheCallsKeys:
    """Reserved keys used inside a work's ``_calls`` cache dictionary."""

    LATEST_CALL_HASH = "latest_call_hash"
def _send_data_to_caller_queue(
    proxy, work: "LightningWork", caller_queue: "BaseQueue", data: Dict, call_hash: str
) -> Dict:
    """Enqueue a ``run()`` invocation (args/kwargs plus a trimmed work state) to the work's runner.

    Returns the work state with the full calls mapping restored.
    """
    proxy.has_sent = True
    # Track the most recent call so the work side knows which entry is active.
    if work._calls[CacheCallsKeys.LATEST_CALL_HASH] is None:
        work._calls[CacheCallsKeys.LATEST_CALL_HASH] = call_hash
    if call_hash not in work._calls:
        work._calls[call_hash] = {"statuses": []}
    else:
        # remove ret when relaunching the work.
        work._calls[call_hash].pop("ret", None)
    work._calls[call_hash]["statuses"].append(make_status(WorkStageStatus.PENDING))
    work_state = work.state
    # There is no need to send all call hashes to the work.
    calls = deepcopy(work_state["calls"])
    work_state["calls"] = {
        k: v for k, v in work_state["calls"].items() if k in (call_hash, CacheCallsKeys.LATEST_CALL_HASH)
    }
    data.update({"state": work_state})
    logger.debug(f"Sending to {work.name}: {data}")
    # deepcopy so later mutations of ``data``/state don't leak into the queued payload.
    caller_queue.put(deepcopy(data))
    # Reset the calls entry.
    work_state["calls"] = calls
    work._restarting = False
    return work_state
155,606 | import os
import pathlib
import queue
import signal
import sys
import threading
import time
import traceback
import warnings
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from functools import partial
from threading import Event, Thread
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Set, Tuple, Type, Union
from deepdiff import DeepDiff, Delta
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.core import constants
from lightning.app.core.queues import MultiProcessQueue
from lightning.app.storage.copier import _Copier, _copy_files
from lightning.app.storage.drive import Drive, _maybe_create_drive
from lightning.app.storage.path import Path, _path_to_work_artifact
from lightning.app.storage.payload import Payload
from lightning.app.utilities.app_helpers import affiliation
from lightning.app.utilities.component import _set_work_context
from lightning.app.utilities.enum import (
CacheCallsKeys,
WorkFailureReasons,
WorkStageStatus,
WorkStopReasons,
make_status,
)
from lightning.app.utilities.exceptions import CacheMissException, LightningSigtermStateException
from lightning.app.utilities.app_helpers import Logger
# Module-level logger (Lightning's wrapper around the stdlib ``logging``).
logger = Logger(__name__)
def _copy_files(
    source_path: pathlib.Path,
    destination_path: pathlib.Path,
    fs: Optional[AbstractFileSystem] = None,
) -> None:
    """Copy files from one path to another.

    The source path must either be an existing file or folder. If the source is a folder, the destination path is
    interpreted as a folder as well. If the source is a file, the destination path is interpreted as a file too.

    Files in a folder are copied recursively and efficiently using multiple threads.
    """
    if fs is None:
        fs = _filesystem()

    def _copy(from_path: pathlib.Path, to_path: pathlib.Path) -> Optional[Exception]:
        # Returns the exception (rather than raising) so failures survive the thread pool.
        _logger.debug(f"Copying {str(from_path)} -> {str(to_path)}")
        try:
            # NOTE: S3 does not have a concept of directories, so we do not need to create one.
            if isinstance(fs, LocalFileSystem):
                fs.makedirs(str(to_path.parent), exist_ok=True)
            fs.put(str(from_path), str(to_path), recursive=False)
        except Exception as ex:
            # Return the exception so that it can be handled in the main thread
            return ex

    # NOTE: Cannot use `S3FileSystem.put(recursive=True)` because it tries to access parent directories
    # which it does not have access to.
    if source_path.is_dir():
        src = [file for file in source_path.rglob("*") if file.is_file()]
        dst = [destination_path / file.relative_to(source_path) for file in src]
        # ``num_workers`` is a module-level constant (not visible in this excerpt).
        with concurrent.futures.ThreadPoolExecutor(num_workers) as executor:
            results = executor.map(_copy, src, dst)
        # Raise the first exception found
        exception = next((e for e in results if isinstance(e, Exception)), None)
        if exception:
            raise exception
    else:
        if isinstance(fs, LocalFileSystem):
            fs.makedirs(str(destination_path.parent), exist_ok=True)
        fs.put(str(source_path), str(destination_path))
class Path(PathlibPath):
"""A drop-in replacement for :class:`pathlib.Path` for all paths in Lightning.
The Lightning Path works exactly the same as :class:`pathlib.Path` but it also remembers in which LightningWork
it was created. If the Path gets passed to a different LightningWork, the file or folder can then be easily
accessed no matter where it is located in the other Work's filesystem.
Args:
*args: Accepts the same arguments as in :class:`pathlib.Path`
**kwargs: Accepts the same keyword arguments as in :class:`pathlib.Path`
"""
def _from_parts(cls, args: Any, **__unused) -> "Path":
    """This gets called from the super class in ``pathlib.Path.__new__``.

    The Lightning Path overrides this to validate the instantiation in the case parts are passed in individually. In
    such a case we need to validate that all parts have the same `origin` and if not, an error is raised.

    NOTE(review): zero-argument ``super()`` with a ``cls`` first parameter suggests a
    stripped ``@classmethod`` decorator; confirm against the original module.
    """
    # Expand the ``lit://`` scheme into the shared storage root directory.
    if args and isinstance(args[0], str) and args[0].startswith("lit://"):
        parts = list(args)
        parts[0] = parts[0][len("lit://") :]
        args = (_storage_root_dir(), *parts)
    # ``pathlib`` internals changed in 3.10: ``_from_parts`` lost its ``init`` kwarg.
    if (sys.version_info.major, sys.version_info.minor) < (3, 10):
        __unused.setdefault("init", True)
        new_path = super()._from_parts(args, **__unused)
    else:
        new_path = super()._from_parts(args)
    new_path._init_attributes()  # we use this instead of defining a __init__() method
    paths_from_parts = [part for part in args if isinstance(part, Path)]
    if not paths_from_parts:
        return new_path
    # All Lightning Paths among the parts must agree on their origin Work.
    top_path = paths_from_parts[0]
    origins = [part._origin for part in paths_from_parts]
    if not all(origins[0] == origin or origin is None for origin in origins):
        raise TypeError(
            "Tried to instantiate a Lightning Path from multiple other Paths that originate from different"
            " LightningWork."
        )
    # Inherit origin/consumer/queue properties from the first Lightning Path part.
    new_path._copy_properties_from(top_path)
    return new_path
def _init_attributes(self):
    """Initialize Lightning-specific bookkeeping (called in place of ``__init__``)."""
    # Optional user-facing name for this path artifact.
    self._name: Optional[str] = None
    # the origin is the work that created this Path and wants to expose file(s)
    self._origin: Optional[Union["LightningWork", str]] = None
    # the consumer is the Work that needs access to the file(s) from the consumer
    self._consumer: Optional[Union["LightningWork", str]] = None
    # Arbitrary metadata attached to this path.
    self._metadata = {}
    # request queue: used to transfer message to storage orchestrator
    self._request_queue: Optional[BaseQueue] = None
    # response queue: used to receive status message from storage orchestrator
    self._response_queue: Optional[BaseQueue] = None
def origin_name(self) -> str:
    """The name of the LightningWork where this path was first created.

    Attaching a Path to a LightningWork will automatically make it the `origin`.
    """
    from lightning.app.core.work import LightningWork

    if isinstance(self._origin, LightningWork):
        return self._origin.name
    return self._origin
def consumer_name(self) -> str:
    """The name of the LightningWork where this path is being accessed.

    By default, this is the same as the :attr:`origin_name`.
    """
    from lightning.app.core.work import LightningWork

    if isinstance(self._consumer, LightningWork):
        return self._consumer.name
    return self._consumer
def hash(self) -> Optional[str]:
    """SHA1 digest uniquely identifying this path together with its origin Work.

    Returns ``None`` if the origin is not defined, i.e., this Path did not yet get
    attached to a LightningWork.
    """
    if self._origin is None:
        return None
    key = f"{self.origin_name}/{self}".encode("utf-8")
    return hashlib.sha1(key).hexdigest()
def parents(self) -> Sequence["Path"]:
    """Like ``pathlib.Path.parents`` but each parent keeps this path's Lightning properties."""
    parents: List["Path"] = list(super().parents)
    for parent in parents:
        # Propagate origin/consumer/queues so parents stay attached to the same Work.
        parent._copy_properties_from(self)
    return parents
def parent(self) -> "Path":
    """Like ``pathlib.Path.parent`` but the result keeps this path's Lightning properties."""
    parent: Path = super().parent
    parent._copy_properties_from(self)
    return parent
def exists(self) -> bool:
    """Check if the path exists locally or remotely.

    If the path exists locally, this method immediately returns ``True``, otherwise it will make a RPC call
    to the attached origin Work and check if the path exists remotely.

    If you strictly want to check local existence only, use :meth:`exists_local` instead. If you strictly want
    to check existence on the remote (regardless of whether the file exists locally or not), use
    :meth:`exists_remote`.
    """
    # bool() guards the annotated return type: without it, a missing local file with no
    # origin short-circuits to ``None`` (``False or (None and ...)``) instead of ``False``.
    return bool(self.exists_local() or (self._origin and self.exists_remote()))
def exists_local(self) -> bool:
    """Check if the path exists on the local filesystem (plain ``pathlib`` check, no RPC)."""
    return super().exists()
def exists_remote(self) -> bool:
    """Check if the path exists remotely on the attached origin Work.

    Raises:
        RuntimeError: If the path is not attached to any Work (origin undefined).
    """
    # Fail early if we need to check the remote but an origin is not defined
    if not self._origin or self._request_queue is None or self._response_queue is None:
        raise RuntimeError(
            f"Trying to check if the file {self} exists, but the path is not attached to a LightningWork."
            f" Set it as an attribute to a LightningWork or pass it to the `run()` method."
        )
    # 1. Send message to orchestrator through queue with a request for a path existence check
    request = _ExistsRequest(source=self.origin_name, path=str(self), name=self._name, hash=self.hash)
    self._request_queue.put(request)
    # 2. Wait for the response to come back
    response: _ExistsResponse = self._response_queue.get()  # blocking
    return response.exists
def get(self, overwrite: bool = False) -> None:
    """Transfer the file/folder from its origin Work to this Work via shared storage.

    Must be called from within ``LightningWork.run()``. Blocks until the transfer completes
    and the content is fully visible in shared storage, then copies it to this Path's
    location on the local filesystem.

    Args:
        overwrite: If ``True``, replace existing local contents; otherwise raise
            ``FileExistsError`` when the path already exists locally.

    Raises:
        RuntimeError: If called from flow context, if the queues are not attached, or if no
            origin Work is set.
        FileExistsError: If the path exists locally and ``overwrite`` is ``False``.
    """
    if _is_flow_context():
        raise RuntimeError("`Path.get()` can only be called from within the `run()` method of LightningWork.")
    if self._request_queue is None or self._response_queue is None:
        raise RuntimeError(
            f"Trying to get the file {self}, but the path is not attached to a LightningApp."
            f" Are you trying to get the file from within `__init__`?"
        )
    if self._origin is None:
        raise RuntimeError(
            f"Trying to get the file {self}, but the path is not attached to a LightningWork. Set it as an"
            f" attribute to a LightningWork or pass it to the `run()` method."
        )
    if self.exists_local() and not overwrite:
        raise FileExistsError(
            f"The file or folder {self} exists locally. Pass `overwrite=True` if you wish to replace it"
            f" with the new contents."
        )
    # 1. Send message to orchestrator through queue with details of the transfer.
    #    The source is the name of the work that owns the file that we request; the destination
    #    is determined by the queue, since each work has a dedicated send and recv queue.
    request = _GetRequest(source=self.origin_name, path=str(self), hash=self.hash, name=self._name)
    self._request_queue.put(request)
    # 2. Wait for the transfer to finish
    response: _GetResponse = self._response_queue.get()  # blocking
    self._validate_get_response(response)
    fs = _filesystem()
    # 3. Wait until the file appears in shared storage; the size comparison presumably guards
    #    against reading a partially written file — confirm.
    while not fs.exists(response.path) or fs.info(response.path)["size"] != response.size:
        sleep(REMOTE_STORAGE_WAIT)
    if self.exists_local() and self.is_dir():
        # Delete the directory, otherwise we can't overwrite it
        shutil.rmtree(self)
    # 4. Copy the file from the shared storage to the destination on the local filesystem
    if fs.isdir(response.path):
        if isinstance(fs, LocalFileSystem):
            shutil.copytree(response.path, self.resolve())
        else:
            glob = f"{str(response.path)}/**"
            _logger.debug(f"Attempting to copy {glob} -> {str(self.absolute())}")
            fs.get(glob, str(self.absolute()), recursive=False)
    else:
        _logger.debug(f"Attempting to copy {str(response.path)} -> {str(self.absolute())}")
        fs.get(str(response.path), str(self.absolute()), recursive=False)
def to_dict(self) -> dict:
    """Serialize this Path to a dictionary (path string plus Lightning metadata)."""
    serialized = {"path": str(self)}
    serialized["origin_name"] = self.origin_name
    serialized["consumer_name"] = self.consumer_name
    serialized["metadata"] = self._metadata
    return serialized
@classmethod
def from_dict(cls, content: dict) -> "Path":
    """Instantiate a Path from a dictionary produced by :meth:`to_dict`.

    Args:
        content: Dict with keys ``path``, ``origin_name``, ``consumer_name`` and ``metadata``.

    Fixed: declared as ``@classmethod`` — ``__reduce__`` calls ``Path.from_dict(self.to_dict())``
    with a single argument, which only works when ``cls`` is bound automatically.
    """
    path = cls(content["path"])
    path._origin = content["origin_name"]
    path._consumer = content["consumer_name"]
    path._metadata = content["metadata"]
    return path
def _validate_get_response(self, response: "_GetResponse") -> None:
    """Ensure the response matches the request this Path issued, and surface transfer errors."""
    matches_request = response.source == self._origin and response.hash == self.hash
    if not matches_request:
        raise RuntimeError(
            f"Tried to get the file {self} but received a response for a request it did not send. The response"
            f" contents are: {response}"
        )
    if response.exception is not None:
        # Re-raise the remote copy failure with local context attached.
        raise RuntimeError(
            f"An exception was raised while trying to transfer the contents at {response.path}"
            f" from Work {response.source} to {response.destination}. See the full stacktrace above."
        ) from response.exception
def _attach_work(self, work: "LightningWork") -> None:
    """Attach a LightningWork to this Path.

    The first work to be attached becomes the `origin`, i.e., the Work that is meant to expose the file to other
    Work. Attaching a Work to a Path that already has an `origin` Work will make it a `consumer`. A consumer Work
    is a work that can access the file only by first transferring it via :meth:`transfer`.

    Args:
        work: LightningWork to be attached to this Path.
    """
    if self._origin is None:
        # Can become an owner only if there is not already one
        self._origin = work
    # The most recently attached Work is always recorded as the consumer.
    self._consumer = work
def _attach_queues(self, request_queue: BaseQueue, response_queue: BaseQueue) -> None:
    """Attach the request/response queues used to talk to the Storage Orchestrator.

    Required before :meth:`get` or :meth:`exists_remote` can be used.
    """
    self._request_queue = request_queue
    self._response_queue = response_queue
def _sanitize(self) -> None:
    """Sanitize this Path so that it can be deep-copied."""
    # Collapse live Work references to their plain string names and drop the queues.
    self._origin, self._consumer = self.origin_name, self.consumer_name
    self._request_queue = None
    self._response_queue = None
def _copy_properties_from(self, other: "Path") -> None:
    """Copy all Lightning-specific properties (origin, consumer, metadata, queues) from ``other``."""
    self._origin = other._origin
    self._consumer = other._consumer
    self._metadata = other._metadata
    self._request_queue = other._request_queue
    self._response_queue = other._response_queue
def with_name(self, name: str) -> "Path":
    """Like ``pathlib.Path.with_name``, but the result inherits this Path's Lightning metadata."""
    path: Path = super().with_name(name)
    path._copy_properties_from(self)
    return path

def with_stem(self, stem: str) -> "Path":
    """Like ``pathlib.Path.with_stem``, but the result inherits this Path's Lightning metadata."""
    path: Path = super().with_stem(stem)
    path._copy_properties_from(self)
    return path

def with_suffix(self, suffix: str) -> "Path":
    """Like ``pathlib.Path.with_suffix``, but the result inherits this Path's Lightning metadata."""
    path: Path = super().with_suffix(suffix)
    path._copy_properties_from(self)
    return path

def relative_to(self, *other) -> "Path":
    """Like ``pathlib.Path.relative_to``, but the result inherits this Path's Lightning metadata."""
    path: Path = super().relative_to(*other)
    path._copy_properties_from(self)
    return path

def __truediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
    """``self / other`` — the joined path inherits this Path's Lightning metadata."""
    path: Path = super().__truediv__(other)
    path._copy_properties_from(self)
    return path

def __rtruediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
    """``other / self`` — the joined path inherits this Path's Lightning metadata."""
    path: Path = super().__rtruediv__(other)
    path._copy_properties_from(self)
    return path
def __reduce__(self):
    # Pickle via the dict representation so origin/consumer metadata survives
    # (the queues are not part of to_dict() and are therefore dropped).
    return Path.from_dict, (self.to_dict(),)

def __json__(self) -> dict:
    """Converts the Path to a json-serializable dict object."""
    return self.to_dict()
def _handle_exists_request(work: "LightningWork", request: _ExistsRequest) -> _ExistsResponse:
    """Answer an existence check for ``request.path`` on this Work's local filesystem."""
    reply_fields = {
        "source": request.source,
        "name": request.name,
        "hash": request.hash,
        "path": request.path,
        "destination": request.destination,
        "exists": os.path.exists(request.path),
    }
    return _ExistsResponse(**reply_fields)
def _handle_get_request(work: "LightningWork", request: _GetRequest) -> _GetResponse:
    """Copy the requested file/folder into shared storage and describe the result.

    Any exception raised during the copy is captured on ``response.exception`` instead of
    propagating, so the requesting side can re-raise it with context.
    """
    from lightning.app.storage.copier import _copy_files

    source_path = pathlib.Path(request.path)
    # The destination inside shared storage is keyed by the request hash (sha1 of origin + path).
    destination_path = _shared_storage_path() / request.hash
    response = _GetResponse(
        source=request.source,
        name=request.name,
        path=str(destination_path),
        hash=request.hash,
        size=source_path.stat().st_size,
        destination=request.destination,
    )
    try:
        _copy_files(source_path, destination_path)
        _logger.debug(f"All files copied from {request.path} to {response.path}.")
    except Exception as ex:
        response.exception = ex
    return response
def _path_to_work_artifact(path: Union[Path, pathlib.Path, str], work: "LightningWork") -> pathlib.Path:
    """Map an absolute local path to its location under the Work's artifact directory.

    The root/anchor component is stripped (``parts[1:]``) and the remainder is appended to
    ``_artifacts_path(work)``.
    """
    return _artifacts_path(work) / pathlib.Path(*pathlib.Path(path).absolute().parts[1:])
class Payload(_BasePayload):
    """The Payload object enables to transfer python objects from one work to another in a similar fashion as
    :class:`~lightning.app.storage.path.Path`."""

    def save(self, obj: Any, path: str) -> None:
        """Pickle ``obj`` to the file at ``path``."""
        with open(path, "wb") as f:
            pickle.dump(obj, f)

    def load(self, path: str) -> Any:
        """Unpickle and return the object stored at ``path``.

        NOTE(review): ``pickle.load`` executes arbitrary code from the file — only load payloads
        produced by trusted Works.
        """
        with open(path, "rb") as f:
            return pickle.load(f)
The provided code snippet includes necessary dependencies for implementing the `persist_artifacts` function. Write a Python function `def persist_artifacts(work: "LightningWork") -> None` to solve the following problem:
Copies all :class:`~lightning.app.storage.path.Path` referenced by the given LightningWork to the shared storage. Files that don't exist or do not originate from the given Work will be skipped.
Here is the function:
def persist_artifacts(work: "LightningWork") -> None:
    """Copies all :class:`~lightning.app.storage.path.Path` referenced by the given LightningWork to the shared
    storage.

    Files that don't exist or do not originate from the given Work will be skipped (a warning lists
    the missing ones). ``Payload`` attributes are first pickled to a file named after the attribute
    and then persisted like any other path.

    Args:
        work: The LightningWork whose Path and Payload attributes should be persisted.
    """
    artifact_paths = [getattr(work, name) for name in work._paths]
    # only copy files that belong to this Work, i.e., when the path's origin refers to the current Work
    artifact_paths = [path for path in artifact_paths if isinstance(path, Path) and path.origin_name == work.name]

    for name in work._state:
        if isinstance(getattr(work, name), Payload):
            artifact_path = pathlib.Path(name).resolve()
            payload = getattr(work, name)
            payload.save(payload.value, artifact_path)
            artifact_paths.append(artifact_path)

    missing_artifacts: Set[str] = set()
    destination_paths = []
    for artifact_path in artifact_paths:
        artifact_path = pathlib.Path(artifact_path).absolute()
        if not artifact_path.exists():
            missing_artifacts.add(str(artifact_path))
            continue
        destination_path = _path_to_work_artifact(artifact_path, work)
        _copy_files(artifact_path, destination_path)
        destination_paths.append(destination_path)

    if missing_artifacts:
        warnings.warn(
            f"{len(missing_artifacts)} artifacts could not be saved because they don't exist:"
            f" {','.join(missing_artifacts)}.",
            UserWarning,
        )
    else:
        # Fixed: the second fragment of the previous message was not an f-string, so the placeholder
        # was logged literally (and referenced an undefined `artifacts_path` helper).
        logger.debug(f"All {len(destination_paths)} artifacts from Work {work.name} were successfully stored.")
155,607 | import os
import pathlib
import queue
import signal
import sys
import threading
import time
import traceback
import warnings
from contextlib import contextmanager
from copy import deepcopy
from dataclasses import dataclass, field
from functools import partial
from threading import Event, Thread
from typing import TYPE_CHECKING, Any, Callable, Dict, Generator, Optional, Set, Tuple, Type, Union
from deepdiff import DeepDiff, Delta
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.core import constants
from lightning.app.core.queues import MultiProcessQueue
from lightning.app.storage.copier import _Copier, _copy_files
from lightning.app.storage.drive import Drive, _maybe_create_drive
from lightning.app.storage.path import Path, _path_to_work_artifact
from lightning.app.storage.payload import Payload
from lightning.app.utilities.app_helpers import affiliation
from lightning.app.utilities.component import _set_work_context
from lightning.app.utilities.enum import (
CacheCallsKeys,
WorkFailureReasons,
WorkStageStatus,
WorkStopReasons,
make_status,
)
from lightning.app.utilities.exceptions import CacheMissException, LightningSigtermStateException
from lightning.app.utilities.app_helpers import Logger
class WorkStateObserver(Thread):
    """This thread runs alongside LightningWork and periodically checks for state changes. If the state changed from
    one interval to the next, it will compute the delta and add it to the queue which is connected to the Flow. This
    enables state changes to be captured that are not triggered through a setattr call.

    Args:
        work: The LightningWork for which the state should be monitored
        delta_queue: The queue to send deltas to when state changes occur
        flow_to_work_delta_queue: Optional queue on which the Flow sends deltas back to this Work.
        error_queue: Optional queue used to forward exceptions raised while applying Flow deltas.
        interval: The interval at which to check for state changes.

    Example:

        class Work(LightningWork):
            ...

            def run(self):
                # This update gets sent to the Flow once the thread compares the new state with the previous one
                self.list.append(1)
    """

    def __init__(
        self,
        work: "LightningWork",
        delta_queue: "BaseQueue",
        flow_to_work_delta_queue: Optional["BaseQueue"] = None,
        error_queue: Optional["BaseQueue"] = None,
        interval: float = 1,
    ) -> None:
        super().__init__(daemon=True)
        self.started = False
        self._work = work
        self._delta_queue = delta_queue
        self._flow_to_work_delta_queue = flow_to_work_delta_queue
        self._error_queue = error_queue
        self._interval = interval
        self._exit_event = Event()
        # Deltas already pushed by LightningWorkSetAttrProxy; subtracted from the next diff in run_once().
        self._delta_memory = []
        self._last_state = deepcopy(self._work.state)

    def run(self) -> None:
        self.started = True
        while not self._exit_event.is_set():
            time.sleep(self._interval)
            self.run_once()

    @staticmethod
    def get_state_changed_from_queue(q: "BaseQueue", timeout: Optional[int] = None):
        """Poll ``q`` for a delta; return ``None`` when the queue is empty within the timeout.

        Fixed: this helper takes the queue as its first argument but was defined as a regular
        method without ``self``; calling ``self.get_state_changed_from_queue(queue)`` would have
        bound the instance to ``q``. Declaring it a ``@staticmethod`` restores the intended call.
        """
        try:
            delta = q.get(timeout=timeout or q.default_timeout)
            return delta
        except queue.Empty:
            return None

    def run_once(self) -> None:
        with _state_observer_lock:
            # Add all deltas the LightningWorkSetAttrProxy has processed and sent to the Flow
            # already while the WorkStateObserver was sleeping.
            for delta in self._delta_memory:
                self._last_state += delta
            self._delta_memory.clear()

            # The remaining delta is the result of state updates triggered outside the setattr,
            # e.g., by a list append.
            delta = Delta(DeepDiff(self._last_state, self._work.state, verbose_level=2))
            if not delta.to_dict():
                return

            self._last_state = deepcopy(self._work.state)
            self._delta_queue.put(ComponentDelta(id=self._work.name, delta=delta))

        if self._flow_to_work_delta_queue:
            # Drain the deltas the Flow sent back and apply them to the local work state.
            while True:
                deep_diff = self.get_state_changed_from_queue(self._flow_to_work_delta_queue)
                if not isinstance(deep_diff, dict):
                    break
                try:
                    with _state_observer_lock:
                        self._work.apply_flow_delta(Delta(deep_diff, raise_errors=True))
                except Exception as ex:
                    # Fixed: was `print(traceback.print_exc())`, which printed a spurious `None`.
                    traceback.print_exc()
                    self._error_queue.put(ex)
                    raise ex

    def join(self, timeout: Optional[float] = None) -> None:
        """Signal the observer loop to exit and wait for the thread to finish."""
        self._exit_event.set()
        super().join(timeout)
class LightningWorkSetAttrProxy:
    """This wrapper around the ``LightningWork.__setattr__`` ensures that state changes get sent to the delta queue to
    be reflected in the Flow.

    Example:

        class Work(LightningWork):
            ...

            def run(self):
                self.var += 1  # This update gets sent to the Flow immediately
    """

    # NOTE(review): these bare annotations with no __init__ assigning them suggest the class was
    # originally decorated with ``@dataclass`` — confirm against the upstream source.
    work_name: str
    work: "LightningWork"
    delta_queue: "BaseQueue"
    state_observer: Optional["WorkStateObserver"]

    def __call__(self, name: str, value: Any) -> None:
        logger.debug(f"Setting {name}: {value}")
        with _state_observer_lock:
            # Diff the state before/after the default setattr to obtain the delta for the Flow.
            state = deepcopy(self.work.state)
            self.work._default_setattr(name, value)
            delta = Delta(DeepDiff(state, self.work.state, verbose_level=2))
            if not delta.to_dict():
                return

            # push the delta only if there is any
            self.delta_queue.put(ComponentDelta(id=self.work_name, delta=delta))

            # add the delta to the buffer to let WorkStateObserver know we already sent this one to the Flow
            if self.state_observer:
                self.state_observer._delta_memory.append(delta)
def _proxy_setattr(work, delta_queue, state_observer: Optional[WorkStateObserver], cleanup: bool = False):
    """Install the ``__setattr__`` proxy on ``work`` (or remove it when ``cleanup`` is True)."""
    proxy = (
        None
        if cleanup
        else LightningWorkSetAttrProxy(
            work.name,
            work,
            delta_queue=delta_queue,
            state_observer=state_observer,
        )
    )
    work._setattr_replacement = proxy
155,608 | import os
import socket
from typing import Optional
from lightning_cloud.openapi import AppinstancesIdBody, Externalv1LightningappInstance, V1NetworkConfig
from lightning.app.utilities.network import LightningClient, find_free_network_port
The provided code snippet includes necessary dependencies for implementing the `enable_port` function. Write a Python function `def enable_port() -> V1NetworkConfig` to solve the following problem:
Make a request to the cloud controlplane to open a port of the flow.
Here is the function:
def enable_port() -> V1NetworkConfig:
    """Make a request to the cloud controlplane to open a port of the flow."""
    app_id = os.getenv("LIGHTNING_CLOUD_APP_ID", None)
    project_id = os.getenv("LIGHTNING_CLOUD_PROJECT_ID", None)
    if not app_id or not project_id:
        raise Exception("The app_id and project_id should be defined.")

    client = LightningClient()
    apps = client.lightningapp_instance_service_list_lightningapp_instances(project_id=project_id)

    # Locate this app's instance (if several entries share the id, the last one wins).
    lit_app: Optional[Externalv1LightningappInstance] = None
    for candidate in apps.lightningapps:
        if candidate.id == app_id:
            lit_app = candidate
    if not lit_app:
        raise RuntimeError(
            "App was not found. Please open an issue at https://github.com/lightning-AI/lightning/issues."
        )

    # Enable the first currently-disabled network config entry.
    found_nc = None
    for network_config in lit_app.spec.network_config:
        if network_config.enable:
            continue
        found_nc = network_config
        network_config.enable = True
        break

    client.lightningapp_instance_service_update_lightningapp_instance(
        project_id=project_id,
        id=lit_app.id,
        body=AppinstancesIdBody(name=lit_app.name, spec=lit_app.spec),
    )
    if not found_nc:
        raise RuntimeError(
            "No available port was found. Please open an issue at https://github.com/lightning-AI/lightning/issues."
        )
    return found_nc
155,609 | import os
import socket
from typing import Optional
from lightning_cloud.openapi import AppinstancesIdBody, Externalv1LightningappInstance, V1NetworkConfig
from lightning.app.utilities.network import LightningClient, find_free_network_port
The provided code snippet includes necessary dependencies for implementing the `disable_port` function. Write a Python function `def disable_port(port: int, ignore_disabled: bool = True) -> None` to solve the following problem:
Make a request to the cloud controlplane to close a port of the flow.
Here is the function:
def disable_port(port: int, ignore_disabled: bool = True) -> None:
    """Make a request to the cloud controlplane to close a port of the flow.

    Args:
        port: The port number to disable on the app instance.
        ignore_disabled: If ``False``, raise when the port is already disabled.

    Raises:
        Exception: If ``LIGHTNING_CLOUD_APP_ID`` / ``LIGHTNING_CLOUD_PROJECT_ID`` are unset.
        RuntimeError: If the app instance cannot be found, or the port was already disabled and
            ``ignore_disabled`` is ``False``.
        ValueError: If no network config entry exists for ``port``.
    """
    app_id = os.getenv("LIGHTNING_CLOUD_APP_ID", None)
    project_id = os.getenv("LIGHTNING_CLOUD_PROJECT_ID", None)
    if not app_id or not project_id:
        raise Exception("The app_id and project_id should be defined.")

    client = LightningClient()
    list_apps_resp = client.lightningapp_instance_service_list_lightningapp_instances(project_id=project_id)

    lit_app: Optional[Externalv1LightningappInstance] = None
    for lapp in list_apps_resp.lightningapps:
        if lapp.id == app_id:
            lit_app = lapp
    if not lit_app:
        raise RuntimeError(
            "App was not found. Please open an issue at https://github.com/lightning-AI/lightning/issues."
        )

    found_nc = None
    for nc in lit_app.spec.network_config:
        if nc.port == port:
            if not nc.enable and not ignore_disabled:
                raise RuntimeError(f"The port {port} was already disabled.")
            nc.enable = False
            found_nc = nc
            break

    # Persist the updated network config on the app instance.
    client.lightningapp_instance_service_update_lightningapp_instance(
        project_id=project_id,
        id=lit_app.id,
        body=AppinstancesIdBody(name=lit_app.name, spec=lit_app.spec),
    )
    if not found_nc:
        ports = [nc.port for nc in lit_app.spec.network_config]
        raise ValueError(f"The provided port doesn't exists. Available ports are {ports}.")
    # Fixed: removed a trailing `assert found_nc` — it was unreachable in the failing case (the
    # ValueError above already guarantees found_nc is truthy) and stripped under `python -O`.
155,610 | import inspect
import warnings
from typing import Dict, List, Union
import lightning.app
from lightning.app.frontend.frontend import Frontend
from lightning.app.utilities.app_helpers import _MagicMockJsonSerializable, is_overridden
from lightning.app.utilities.cloud import is_running_in_cloud
def _add_comment_to_literal_code(method, contains, comment):
"""Inspects a method's code and adds a message to it.
This is a nice to have, so if it fails for some reason, it shouldn't affect the program.
"""
try:
lines = inspect.getsource(method)
lines = lines.split("\n")
idx_list = [i for i, x in enumerate(lines) if contains in x]
for i in idx_list:
line = lines[i]
line += comment
lines[i] = line
return "\n".join(lines)
except Exception:
return ""
def _collect_content_layout(
    layout: List[Dict], app: "lightning.app.LightningApp", flow: "lightning.app.LightningFlow"
) -> Union[List[Dict], Dict]:
    """Process the layout returned by the ``configure_layout()`` method if the returned format represents an
    aggregation of child layouts.

    Each entry must carry a ``content`` key referencing a child flow, a child work, or a URL string.
    Returns the processed list of tab dicts, or ``flow._layout`` when a work supplies its own Frontend.
    """
    for entry in layout:
        if "content" not in entry:
            raise ValueError(
                f"A dictionary returned by `{flow.__class__.__name__}.configure_layout()` is missing a key 'content'."
                f" For the value, choose either a reference to a child flow or a URL."  # fixed garbled "URla"
            )
        if isinstance(entry["content"], str):  # assume this is a URL
            url = entry["content"]
            if url.startswith("/"):
                # The URL isn't fully defined yet. Looks something like ``self.work.url + /something``.
                entry["target"] = ""
            else:
                entry["target"] = url
            if url.startswith("http://") and is_running_in_cloud():
                warnings.warn(
                    f"You configured an http link {url[:32]}... but it won't be accessible in the cloud."
                    f" Consider replacing 'http' with 'https' in the link above."
                )
        elif isinstance(entry["content"], lightning.app.LightningFlow):
            entry["content"] = entry["content"].name
        elif isinstance(entry["content"], lightning.app.LightningWork):
            work = entry["content"]
            work_layout = _collect_work_layout(work)
            if work_layout is None:
                entry["content"] = ""
            elif isinstance(work_layout, str):
                entry["content"] = work_layout
                entry["target"] = work_layout
            elif isinstance(work_layout, (Frontend, _MagicMockJsonSerializable)):
                # A work-provided Frontend must be the only tab in this flow's layout.
                if len(layout) > 1:
                    lines = _add_comment_to_literal_code(
                        flow.configure_layout, contains="return", comment=" <------- this guy"
                    )
                    m = f"""
The return value of configure_layout() in `{flow.__class__.__name__}` is an
unsupported format:
\n{lines}
The tab containing a `{work.__class__.__name__}` must be the only tab in the
layout of this flow.
(see the docs for `LightningWork.configure_layout`).
"""
                    raise TypeError(m)
                if isinstance(work_layout, Frontend):
                    # If the work returned a frontend, treat it as belonging to the flow.
                    # NOTE: This could evolve in the future to run the Frontend directly in the work machine.
                    frontend = work_layout
                    frontend.flow = flow
                elif isinstance(work_layout, _MagicMockJsonSerializable):
                    # The import was mocked, we set a dummy `Frontend` so that `is_headless` knows there is a UI.
                    frontend = "mock"
                app.frontends.setdefault(flow.name, frontend)
                return flow._layout
        elif isinstance(entry["content"], _MagicMockJsonSerializable):
            # The import was mocked, we just record dummy content so that `is_headless` knows there is a UI
            entry["content"] = "mock"
            entry["target"] = "mock"
        else:
            # fixed: `repr(entry["content"])` nested double quotes inside a double-quoted f-string,
            # which is a SyntaxError before Python 3.12; single quotes produce identical output
            m = f"""
A dictionary returned by `{flow.__class__.__name__}.configure_layout()` contains an unsupported entry.
{{'content': {repr(entry['content'])}}}
Set the `content` key to a child flow or a URL, for example:
class {flow.__class__.__name__}(LightningFlow):
def configure_layout(self):
return {{'content': childFlow OR childWork OR 'http://some/url'}}
"""
            raise ValueError(m)
    return layout
class Frontend(ABC):
    """Base class for any frontend that gets exposed by LightningFlows.

    The flow attribute will be set by the app while bootstrapping.
    """

    def __init__(self) -> None:
        # Populated by the app during bootstrapping; None until then.
        self.flow: Optional["LightningFlow"] = None

    def start_server(self, host: str, port: int, root_path: str = "") -> None:
        """Start the process that serves the UI at the given hostname and port number.

        Arguments:
            host: The hostname where the UI will be served. This gets determined by the dispatcher (e.g., cloud),
                but defaults to localhost when running locally.
            port: The port number where the UI will be served. This gets determined by the dispatcher, which by
                default chooses any free port when running locally.
            root_path: root_path for the server if the app is exposed via a proxy at `/<root_path>`

        Example:

            A custom implementation could look like this:

            .. code-block:: python

                def start_server(self, host, port, root_path=""):
                    self._process = subprocess.Popen(["flask", "run" "--host", host, "--port", str(port)])
        """

    def stop_server(self) -> None:
        """Stop the process that was started with :meth:`start_server` so the App can shut down.

        This method gets called when the LightningApp terminates.

        Example:

            .. code-block:: python

                def stop_server(self):
                    self._process.kill()
        """
class _MagicMockJsonSerializable(MagicMock):
def __json__():
return "{}"
The provided code snippet includes necessary dependencies for implementing the `_collect_layout` function. Write a Python function `def _collect_layout(app: "lightning.app.LightningApp", flow: "lightning.app.LightningFlow") -> Union[Dict, List[Dict]]` to solve the following problem:
Process the layout returned by the ``configure_layout()`` method in each flow.
Here is the function:
def _collect_layout(app: "lightning.app.LightningApp", flow: "lightning.app.LightningFlow") -> Union[Dict, List[Dict]]:
    """Process the layout returned by the ``configure_layout()`` method in each flow.

    Accepts a ``Frontend`` instance, a mocked import, a single tab dict, or a list/tuple of tab
    dicts; any other return value raises ``TypeError`` with usage guidance.
    """
    layout = flow.configure_layout()
    if isinstance(layout, Frontend):
        frontend = layout
        frontend.flow = flow
        app.frontends.setdefault(flow.name, frontend)
        # When running locally, the target will get overwritten by the dispatcher when launching the
        # frontend servers. When running in the cloud, the frontend code will construct the URL
        # based on the flow name.
        return flow._layout
    if isinstance(layout, _MagicMockJsonSerializable):
        # The import was mocked, we set a dummy `Frontend` so that `is_headless` knows there is a UI
        app.frontends.setdefault(flow.name, "mock")
        return flow._layout
    if isinstance(layout, dict):
        # Single tab: normalize to a one-element list for processing.
        layout = _collect_content_layout([layout], app, flow)
    elif isinstance(layout, (list, tuple)) and all(isinstance(item, dict) for item in layout):
        layout = _collect_content_layout(layout, app, flow)
    else:
        lines = _add_comment_to_literal_code(flow.configure_layout, contains="return", comment=" <------- this guy")
        raise TypeError(
            f"""
The return value of configure_layout() in `{flow.__class__.__name__}` is an unsupported layout format:
\n{lines}
Return either an object of type {Frontend} (e.g., StreamlitFrontend, StaticWebFrontend):
def configure_layout(self):
return la.frontend.Frontend(...)
OR a single dict:
def configure_layout(self):
tab1 = {{'name': 'tab name', 'content': self.a_component}}
return tab1
OR a list of dicts:
def configure_layout(self):
tab1 = {{'name': 'tab name 1', 'content': self.component_a}}
tab2 = {{'name': 'tab name 2', 'content': self.component_b}}
return [tab1, tab2]
(see the docs for `LightningFlow.configure_layout`).
"""
        )
    return layout
155,611 | import random
from lightning_cloud.openapi import ProjectIdProjectclustersbindingsBody, V1ClusterType
from lightning_cloud.openapi.rest import ApiException
from lightning.app.utilities.network import LightningClient
def _ensure_cluster_project_binding(client: LightningClient, project_id: str, cluster_id: str) -> None:
    """Create a project-to-cluster binding unless an identical one already exists."""
    existing = client.projects_service_list_project_cluster_bindings(project_id=project_id)
    for binding in existing.clusters:
        if binding.cluster_id == cluster_id and binding.project_id == project_id:
            # The binding is already in place — nothing to do.
            return
    client.projects_service_create_project_cluster_binding(
        project_id=project_id,
        body=ProjectIdProjectclustersbindingsBody(cluster_id=cluster_id),
    )
155,612 | import random
from lightning_cloud.openapi import ProjectIdProjectclustersbindingsBody, V1ClusterType
from lightning_cloud.openapi.rest import ApiException
from lightning.app.utilities.network import LightningClient
The provided code snippet includes necessary dependencies for implementing the `_get_default_cluster` function. Write a Python function `def _get_default_cluster(client: LightningClient, project_id: str) -> str` to solve the following problem:
This utility implements a minimal version of the cluster selection logic used in the cloud. TODO: This should be requested directly from the platform.
Here is the function:
def _get_default_cluster(client: LightningClient, project_id: str) -> str:
    """This utility implements a minimal version of the cluster selection logic used in the cloud.

    TODO: This should be requested directly from the platform.
    """
    cluster_bindings = client.projects_service_list_project_cluster_bindings(project_id=project_id).clusters
    if not cluster_bindings:
        raise ValueError(f"No clusters are bound to the project {project_id}.")
    if len(cluster_bindings) == 1:
        # Only one candidate — no need to query cluster details.
        return cluster_bindings[0].cluster_id

    fetched = []
    for binding in cluster_bindings:
        try:
            fetched.append(client.cluster_service_get_cluster(binding.cluster_id))
        except ApiException:
            # If we failed to get the cluster, ignore it
            continue

    # Keep only the global clusters.
    global_clusters = [cluster for cluster in fetched if cluster.spec.cluster_type == V1ClusterType.GLOBAL]
    if len(global_clusters) == 0:
        raise RuntimeError(f"No clusters found on `{client.api_client.configuration.host}`.")
    return random.choice(global_clusters).id  # noqa: S311
155,613 | import enum
import json
import os
from copy import deepcopy
from time import sleep
from typing import Any, Dict, List, Optional, Tuple, Union
from deepdiff import DeepDiff
from requests import Session
from requests.exceptions import ConnectionError
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.storage.drive import _maybe_create_drive
from lightning.app.utilities.app_helpers import AppStatePlugin, BaseStatePlugin, Logger
from lightning.app.utilities.network import LightningClient, _configure_session
def headers_for(context: Dict[str, str]) -> Dict[str, str]:
    """Build the Lightning session headers from a context dict; missing keys default to ""."""
    header_to_key = {
        "X-Lightning-Session-UUID": "token",
        "X-Lightning-Session-ID": "session_id",
        "X-Lightning-Type": "type",
    }
    return {header: context.get(key, "") for header, key in header_to_key.items()}
155,614 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
def breadth_first(root: "Component", types: Type["ComponentTuple"] = None):
    """Returns a generator that walks through the tree of components breadth-first.

    Arguments:
        root: The root component of the tree
        types: If provided, only the component types in this list will be visited.
    """
    # Delegates the traversal entirely to _BreadthFirstVisitor; this wrapper only fixes the API name.
    yield from _BreadthFirstVisitor(root, types)
class Path(PathlibPath):
"""A drop-in replacement for :class:`pathlib.Path` for all paths in Lightning.
The Lightning Path works exactly the same as :class:`pathlib.Path` but it also remembers in which LightningWork
it was created. If the Path gets passed to a different LightningWork, the file or folder can then be easily
accessed no matter where it is located in the other Work's filesystem.
Args:
*args: Accepts the same arguments as in :class:`pathlib.Path`
**kwargs: Accepts the same keyword arguments as in :class:`pathlib.Path`
"""
def _from_parts(cls, args: Any, **__unused) -> "Path":
"""This gets called from the super class in ``pathlib.Path.__new__``.
The Lightning Path overrides this to validate the instantiation in the case parts are passed in individually. In
such a case we need to validate that all parts have the same `origin` and if not, an error is raised.
"""
if args and isinstance(args[0], str) and args[0].startswith("lit://"):
parts = list(args)
parts[0] = parts[0][len("lit://") :]
args = (_storage_root_dir(), *parts)
if (sys.version_info.major, sys.version_info.minor) < (3, 10):
__unused.setdefault("init", True)
new_path = super()._from_parts(args, **__unused)
else:
new_path = super()._from_parts(args)
new_path._init_attributes() # we use this instead of defining a __init__() method
paths_from_parts = [part for part in args if isinstance(part, Path)]
if not paths_from_parts:
return new_path
top_path = paths_from_parts[0]
origins = [part._origin for part in paths_from_parts]
if not all(origins[0] == origin or origin is None for origin in origins):
raise TypeError(
"Tried to instantiate a Lightning Path from multiple other Paths that originate from different"
" LightningWork."
)
new_path._copy_properties_from(top_path)
return new_path
def _init_attributes(self):
self._name: Optional[str] = None
# the origin is the work that created this Path and wants to expose file(s)
self._origin: Optional[Union["LightningWork", str]] = None
# the consumer is the Work that needs access to the file(s) from the consumer
self._consumer: Optional[Union["LightningWork", str]] = None
self._metadata = {}
# request queue: used to transfer message to storage orchestrator
self._request_queue: Optional[BaseQueue] = None
# response queue: used to receive status message from storage orchestrator
self._response_queue: Optional[BaseQueue] = None
def origin_name(self) -> str:
"""The name of the LightningWork where this path was first created.
Attaching a Path to a LightningWork will automatically make it the `origin`.
"""
from lightning.app.core.work import LightningWork
return self._origin.name if isinstance(self._origin, LightningWork) else self._origin
def consumer_name(self) -> str:
    """The name of the LightningWork where this path is being accessed.

    By default, this is the same as the :attr:`origin_name`.
    """
    # NOTE(review): presumably an ``@property`` in the original file (decorator not visible here).
    from lightning.app.core.work import LightningWork

    # ``_consumer`` may hold either the live LightningWork or already just its name (after ``_sanitize``).
    return self._consumer.name if isinstance(self._consumer, LightningWork) else self._consumer
def hash(self) -> Optional[str]:
    """The hash of this Path uniquely identifies the file path and the associated origin Work.

    Returns ``None`` if the origin is not defined, i.e., this Path did not yet get attached to a LightningWork.
    """
    if self._origin is None:
        return None
    # ``origin_name`` is read as an attribute here, consistent with it being a property.
    contents = f"{self.origin_name}/{self}"
    # sha1 serves as a cheap, stable identifier here — not a security boundary
    return hashlib.sha1(contents.encode("utf-8")).hexdigest()
def parents(self) -> Sequence["Path"]:
    """Return all logical ancestors of this path, each carrying this path's Lightning properties."""
    ancestors: List["Path"] = list(super().parents)
    for ancestor in ancestors:
        ancestor._copy_properties_from(self)
    return ancestors
def parent(self) -> "Path":
    """Return the immediate parent, propagating this path's Lightning properties onto it."""
    result: Path = super().parent
    result._copy_properties_from(self)
    return result
def exists(self) -> bool:
    """Check if the path exists locally or remotely.

    If the path exists locally, this method immediately returns ``True``, otherwise it will make a RPC call
    to the attached origin Work and check if the path exists remotely.

    If you strictly want to check local existence only, use :meth:`exists_local` instead. If you strictly want
    to check existence on the remote (regardless of whether the file exists locally or not), use
    :meth:`exists_remote`.
    """
    # Fix: wrap the short-circuit expression in ``bool()``. Previously the raw value of
    # ``self._origin`` (e.g. ``None``) leaked out when the path neither exists locally nor
    # has an origin, violating the declared ``bool`` return type. Truthiness is unchanged.
    return self.exists_local() or bool(self._origin and self.exists_remote())
def exists_local(self) -> bool:
    """Check if the path exists on the local filesystem (no RPC involved)."""
    return super().exists()
def exists_remote(self) -> bool:
    """Check if the path exists remotely on the attached origin Work.

    Blocks until the storage orchestrator answers.

    Raises:
        RuntimeError: If the path is not attached to any Work (origin undefined) or the queues are not set.
    """
    # Fail early if we need to check the remote but an origin is not defined
    if not self._origin or self._request_queue is None or self._response_queue is None:
        raise RuntimeError(
            f"Trying to check if the file {self} exists, but the path is not attached to a LightningWork."
            f" Set it as an attribute to a LightningWork or pass it to the `run()` method."
        )
    # 1. Send message to orchestrator through queue that with a request for a path existence check
    request = _ExistsRequest(source=self.origin_name, path=str(self), name=self._name, hash=self.hash)
    self._request_queue.put(request)
    # 2. Wait for the response to come back
    response: _ExistsResponse = self._response_queue.get()  # blocking
    return response.exists
def get(self, overwrite: bool = False) -> None:
    """Transfer the file(s) at this path from the origin Work to the local filesystem.

    Blocks until the transfer through shared storage has completed.

    Args:
        overwrite: Whether to replace existing local contents with the transferred ones.

    Raises:
        RuntimeError: If called from a flow context, if the queues are not attached, or if no origin is set.
        FileExistsError: If the target exists locally and ``overwrite`` is ``False``.
    """
    if _is_flow_context():
        raise RuntimeError("`Path.get()` can only be called from within the `run()` method of LightningWork.")
    if self._request_queue is None or self._response_queue is None:
        raise RuntimeError(
            f"Trying to get the file {self}, but the path is not attached to a LightningApp."
            f" Are you trying to get the file from within `__init__`?"
        )
    if self._origin is None:
        raise RuntimeError(
            f"Trying to get the file {self}, but the path is not attached to a LightningWork. Set it as an"
            f" attribute to a LightningWork or pass it to the `run()` method."
        )
    if self.exists_local() and not overwrite:
        raise FileExistsError(
            f"The file or folder {self} exists locally. Pass `overwrite=True` if you wish to replace it"
            f" with the new contents."
        )
    # 1. Send message to orchestrator through queue with details of the transfer
    # the source is the name of the work that owns the file that we request
    # the destination is determined by the queue, since each work has a dedicated send and recv queue
    request = _GetRequest(source=self.origin_name, path=str(self), hash=self.hash, name=self._name)
    self._request_queue.put(request)
    # 2. Wait for the transfer to finish
    response: _GetResponse = self._response_queue.get()  # blocking
    self._validate_get_response(response)
    fs = _filesystem()
    # 3. Wait until the file appears in shared storage; the size check guards against
    # reading a file that is still being written by the origin side.
    while not fs.exists(response.path) or fs.info(response.path)["size"] != response.size:
        sleep(REMOTE_STORAGE_WAIT)
    if self.exists_local() and self.is_dir():
        # Delete the directory, otherwise we can't overwrite it
        shutil.rmtree(self)
    # 4. Copy the file from the shared storage to the destination on the local filesystem
    if fs.isdir(response.path):
        if isinstance(fs, LocalFileSystem):
            shutil.copytree(response.path, self.resolve())
        else:
            glob = f"{str(response.path)}/**"
            _logger.debug(f"Attempting to copy {glob} -> {str(self.absolute())}")
            # NOTE(review): a `**` glob combined with `recursive=False` looks inconsistent —
            # confirm this matches the intended fsspec semantics for nested directories.
            fs.get(glob, str(self.absolute()), recursive=False)
    else:
        _logger.debug(f"Attempting to copy {str(response.path)} -> {str(self.absolute())}")
        fs.get(str(response.path), str(self.absolute()), recursive=False)
def to_dict(self) -> dict:
    """Serialize this Path to a dictionary."""
    serialized = {"path": str(self)}
    serialized["origin_name"] = self.origin_name
    serialized["consumer_name"] = self.consumer_name
    serialized["metadata"] = self._metadata
    return serialized
def from_dict(cls, content: dict) -> "Path":
    """Instantiate a Path from a dictionary produced by :meth:`to_dict`."""
    # NOTE(review): first parameter is ``cls`` — presumably a ``@classmethod`` originally.
    instance = cls(content["path"])
    for attribute, key in (("_origin", "origin_name"), ("_consumer", "consumer_name"), ("_metadata", "metadata")):
        setattr(instance, attribute, content[key])
    return instance
def _validate_get_response(self, response: "_GetResponse") -> None:
if response.source != self._origin or response.hash != self.hash:
raise RuntimeError(
f"Tried to get the file {self} but received a response for a request it did not send. The response"
f" contents are: {response}"
)
if response.exception is not None:
raise RuntimeError(
f"An exception was raised while trying to transfer the contents at {response.path}"
f" from Work {response.source} to {response.destination}. See the full stacktrace above."
) from response.exception
def _attach_work(self, work: "LightningWork") -> None:
"""Attach a LightningWork to this Path.
The first work to be attached becomes the `origin`, i.e., the Work that is meant to expose the file to other
Work. Attaching a Work to a Path that already has an `origin` Work will make it a `consumer`. A consumer Work
is a work that can access the file only by first transferring it via :meth:`transfer`.
Args:
work: LightningWork to be attached to this Path.
"""
if self._origin is None:
# Can become an owner only if there is not already one
self._origin = work
self._consumer = work
def _attach_queues(self, request_queue: BaseQueue, response_queue: BaseQueue) -> None:
    """Attaches the queues for communication with the Storage Orchestrator."""
    self._request_queue, self._response_queue = request_queue, response_queue
def _sanitize(self) -> None:
    """Sanitize this Path so that it can be deep-copied."""
    # Collapse live Work references down to their plain-string names
    self._origin = self.origin_name
    self._consumer = self.consumer_name
    # Queues hold process-local resources and must not be copied
    self._request_queue = None
    self._response_queue = None
def _copy_properties_from(self, other: "Path") -> None:
self._origin = other._origin
self._consumer = other._consumer
self._metadata = other._metadata
self._request_queue = other._request_queue
self._response_queue = other._response_queue
def with_name(self, name: str) -> "Path":
    """Like :meth:`pathlib.PurePath.with_name`, but the result keeps this path's Lightning properties."""
    path: Path = super().with_name(name)
    path._copy_properties_from(self)
    return path
def with_stem(self, stem: str) -> "Path":
    """Like :meth:`pathlib.PurePath.with_stem`, but the result keeps this path's Lightning properties."""
    path: Path = super().with_stem(stem)
    path._copy_properties_from(self)
    return path
def with_suffix(self, suffix: str) -> "Path":
    """Like :meth:`pathlib.PurePath.with_suffix`, but the result keeps this path's Lightning properties."""
    path: Path = super().with_suffix(suffix)
    path._copy_properties_from(self)
    return path
def relative_to(self, *other) -> "Path":
    """Like :meth:`pathlib.PurePath.relative_to`, but the result keeps this path's Lightning properties."""
    path: Path = super().relative_to(*other)
    path._copy_properties_from(self)
    return path
def __truediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
    """``self / other`` — the joined path keeps this path's Lightning properties."""
    path: Path = super().__truediv__(other)
    path._copy_properties_from(self)
    return path
def __rtruediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
    """``other / self`` — the joined path keeps this path's Lightning properties."""
    path: Path = super().__rtruediv__(other)
    path._copy_properties_from(self)
    return path
def __reduce__(self):
    """Pickle support: serialize through ``to_dict`` (names only, no queue handles) and rebuild via ``from_dict``."""
    return Path.from_dict, (self.to_dict(),)
def __json__(self) -> dict:
    """Converts the Path to a json-serializable dict object."""
    return self.to_dict()
def _handle_exists_request(work: "LightningWork", request: _ExistsRequest) -> _ExistsResponse:
    """Answer an existence-check request by probing the filesystem on the origin Work's side."""
    fields = {
        "source": request.source,
        "name": request.name,
        "hash": request.hash,
        "path": request.path,
        "destination": request.destination,
        "exists": os.path.exists(request.path),
    }
    return _ExistsResponse(**fields)
def _handle_get_request(work: "LightningWork", request: _GetRequest) -> _GetResponse:
    """Serve a transfer request by copying the requested file(s) into shared storage.

    Runs on the origin Work's side. A copy failure is captured on the response (``exception``)
    rather than raised, so the consumer can re-raise it locally.
    """
    from lightning.app.storage.copier import _copy_files

    # NOTE(review): ``work`` is unused in this body — presumably kept for a uniform handler signature.
    source_path = pathlib.Path(request.path)
    # Stage the files in shared storage under the request hash so concurrent transfers cannot collide
    destination_path = _shared_storage_path() / request.hash
    response = _GetResponse(
        source=request.source,
        name=request.name,
        path=str(destination_path),
        hash=request.hash,
        size=source_path.stat().st_size,
        destination=request.destination,
    )
    try:
        _copy_files(source_path, destination_path)
        _logger.debug(f"All files copied from {request.path} to {response.path}.")
    except Exception as ex:
        response.exception = ex
    return response
class LightningFlow:
    """Coordinates long-running :class:`~lightning.app.core.work.LightningWork` jobs and child flows."""

    _INTERNAL_STATE_VARS = {
        # Internal protected variables that are still part of the state (even though they are prefixed with "_")
        "_paths",
        "_layout",
    }
def __init__(self) -> None:
    """The LightningFlow is used by the :class:`~lightning.app.core.app.LightningApp` to coordinate and manage
    long-running jobs contained, the :class:`~lightning.app.core.work.LightningWork`.

    A LightningFlow is characterized by:

    * A set of state variables.
    * Long-running jobs (:class:`~lightning.app.core.work.LightningWork`).
    * Its children ``LightningFlow`` or ``LightningWork`` with their state variables.

    **State variables**

    The LightningFlow are special classes whose attributes require to be
    json-serializable (e.g., int, float, bool, list, dict, ...).
    They also may not reach into global variables unless they are constant.
    The attributes need to be all defined in `__init__` method,
    and eventually assigned to different values throughout the lifetime of the object.
    However, defining new attributes outside of `__init__` is not allowed.

    Attributes taken together represent the state of the component.
    Components are capable of retrieving their state and that of their
    children recursively at any time. They are also capable of setting
    an externally provided state recursively to its children.

    **Execution model and work**

    The entry point for execution is the ``run`` method at the root component.
    The ``run`` method of the root component may call the ``run`` method of its children, and the children
    may call the ``run`` methods of their children and so on.

    The ``run`` method of the root component is called repeatedly in a while loop forever until the app gets
    terminated. In this programming model (reminiscent of React, Vue or Streamlit from the JavaScript world),
    the values of the state variables, or their changes, are translated into actions throughout the component
    hierarchy. This means the flow of execution will only be affected by state changes in a component or one of
    its children, and otherwise remain idempotent.

    The actions themselves are self-contained within :class:`~lightning.app.core.work.LightningWork`.
    The :class:`~lightning.app.core.work.LightningWork` are typically used for long-running jobs,
    like downloading a dataset, performing a query, starting a computationally heavy script.
    While one may access any state variable in a LightningWork from a LightningFlow, one may not
    directly call methods of other components from within a LightningWork as LightningWork can't have any children.
    This limitation allows applications to be distributed at scale.

    **Component hierarchy and App**

    Given the above characteristics, a root LightningFlow, potentially containing
    children components, can be passed to an App object and its execution
    can be distributed (each LightningWork will be run within its own process
    or different arrangements).

    Example:

        >>> from lightning.app import LightningFlow
        >>> class RootFlow(LightningFlow):
        ...     def __init__(self):
        ...         super().__init__()
        ...         self.counter = 0
        ...     def run(self):
        ...         self.counter += 1
        ...
        >>> flow = RootFlow()
        >>> flow.run()
        >>> assert flow.counter == 1
        >>> assert flow.state["vars"]["counter"] == 1
    """
    # names of attributes that belong to the serializable state
    self._state: set = set()
    self._name: str = ""
    # names of child LightningFlow / LightningWork / structure attributes
    self._flows: set = set()
    self._works: set = set()
    self._structures: set = set()
    # bookkeeping for method invocations (e.g. ``schedule``, ``experimental_iterate``)
    self._calls: dict = {}
    self._changes: dict = {}
    self._layout: Union[List[Dict], Dict] = {}
    # serialized Path attributes (see ``__setattr__`` / ``__getattr__``)
    self._paths: dict = {}
    self._backend: Optional["Backend"] = None
    # tuple instead of a list so that it cannot be modified without using the setter
    self._lightningignore: Tuple[str, ...] = ()
def name(self) -> str:
    """Return the current LightningFlow name."""
    # NOTE(review): read as a plain attribute elsewhere (e.g. ``self.name`` in ``__setattr__``),
    # so this is presumably an ``@property`` in the original file — confirm.
    return self._name or "root"
def __setattr__(self, name: str, value: Any) -> None:
    """Intercept attribute assignment to maintain the flow's state/children bookkeeping.

    Public attributes become part of the serializable state; child flows/works/structures are
    registered and wired to the backend; ``lit://`` strings and Paths receive special handling.
    (Indentation below is reconstructed — the original file's nesting should be confirmed.)
    """
    # Delegate to a property setter if one is defined on the class
    attr = getattr(self.__class__, name, None)
    if isinstance(attr, property) and attr.fset is not None:
        return attr.fset(self, value)
    from lightning.app.structures import Dict as ComponentDict
    from lightning.app.structures import List as ComponentList
    # Outside __init__, only attributes already known to the state (or components assigned
    # during run) may be set — this enforces the "define everything in __init__" contract.
    if (
        not _is_init_context(self)
        and name not in self._state
        and name not in self._paths
        and (
            not isinstance(value, (LightningWork, LightningFlow))
            or (isinstance(value, (LightningWork, LightningFlow)) and not _is_run_context(self))
        )
        and name not in self._works.union(self._flows)
        and self._is_state_attribute(name)
    ):
        raise AttributeError(f"Cannot set attributes that were not defined in __init__: {name}")
    # ``lit://`` strings are shorthand for shared-storage Paths
    if isinstance(value, str) and value.startswith("lit://"):
        value = Path(value)
    if self._is_state_attribute(name):
        if hasattr(self, name):
            # Child components are immutable once assigned
            if name in self._flows and value != getattr(self, name):
                raise AttributeError(f"Cannot set attributes as the flow can't be changed once defined: {name}")
            if name in self._works and value != getattr(self, name):
                raise AttributeError(f"Cannot set attributes as the work can't be changed once defined: {name}")
        # Promote plain list/dict of components to the managed structure types
        if isinstance(value, (list, dict)) and value:
            _type = (LightningFlow, LightningWork, ComponentList, ComponentDict)
            if isinstance(value, list) and all(isinstance(va, _type) for va in value):
                value = ComponentList(*value)
            if isinstance(value, dict) and all(isinstance(va, _type) for va in value.values()):
                value = ComponentDict(**value)
        if isinstance(value, LightningFlow):
            self._flows.add(name)
            _set_child_name(self, value, name)
            # a child component is tracked via _flows, not via the plain state vars
            if name in self._state:
                self._state.remove(name)
            # Attach the backend to the flow and its children work.
            if self._backend:
                LightningFlow._attach_backend(value, self._backend)
            for work in value.works():
                work._register_cloud_compute()
        elif isinstance(value, LightningWork):
            self._works.add(name)
            _set_child_name(self, value, name)
            if name in self._state:
                self._state.remove(name)
            if self._backend:
                self._backend._wrap_run_method(_LightningAppRef().get_current(), value)  # type: ignore[arg-type]
            value._register_cloud_compute()
        elif isinstance(value, (ComponentDict, ComponentList)):
            self._structures.add(name)
            _set_child_name(self, value, name)
            _backend = getattr(self, "backend", None)
            if _backend is not None:
                value._backend = _backend
            for flow in value.flows:
                if _backend is not None:
                    LightningFlow._attach_backend(flow, _backend)
            for work in value.works:
                work._register_cloud_compute()
                if _backend is not None:
                    _backend._wrap_run_method(_LightningAppRef().get_current(), work)
        elif isinstance(value, Path):
            # In the init context, the full name of the Flow and Work is not known, i.e., we can't serialize
            # the path without losing the information of origin and consumer. Hence, we delay the serialization
            # of the path object until the app is instantiated.
            if not _is_init_context(self):
                self._paths[name] = value.to_dict()
            self._state.add(name)
        elif isinstance(value, Drive):
            value = deepcopy(value)
            value.component_name = self.name
            self._state.add(name)
        elif isinstance(value, CloudCompute):
            self._state.add(name)
        elif _is_json_serializable(value):
            self._state.add(name)
            if not isinstance(value, Path) and hasattr(self, "_paths") and name in self._paths:
                # The attribute changed type from Path to another
                self._paths.pop(name)
        else:
            raise AttributeError(
                f"Only JSON-serializable attributes are currently supported"
                f" (str, int, float, bool, tuple, list, dict etc.) to be part of {self} state. "
                f"Found the attribute {name} with {value} instead. \n"
                "HINT: Private attributes defined as follows `self._x = y` won't be shared between components "
                "and therefore don't need to be JSON-serializable."
            )
    super().__setattr__(name, value)
    return None
def _attach_backend(flow: "LightningFlow", backend: "Backend") -> None:
    """Attach the backend to all flows and its children."""
    # NOTE(review): no ``self``/``cls`` parameter — presumably a ``@staticmethod`` originally.
    flow._backend = backend
    # Propagate to structures held directly by this flow
    for name in flow._structures:
        getattr(flow, name)._backend = backend
    # Propagate to all (recursively collected) child flows and their structures
    for child_flow in flow.flows.values():
        child_flow._backend = backend
        for name in child_flow._structures:
            getattr(child_flow, name)._backend = backend
    app = _LightningAppRef().get_current()
    # Wire every child work's run method through the backend
    for child_work in flow.works():
        child_work._backend = backend
        backend._wrap_run_method(app, child_work)  # type: ignore[arg-type]
def __getattr__(self, item: str) -> Any:
    """Re-hydrate serialized Path attributes on access (see ``__setattr__`` / ``_paths``)."""
    if item in self.__dict__.get("_paths", {}):
        return Path.from_dict(self._paths[item])
    return self.__getattribute__(item)
def ready(self) -> bool:
    """Override to customize when your App should be ready."""
    children = self.flows
    if not children:
        return True
    return all(child.ready for child in children.values())
def changes(self) -> dict:
    """Return a shallow copy of the pending state changes recorded on this flow."""
    return self._changes.copy()
def state(self) -> dict:
    """Returns the current flow state along its children."""
    children_state = {child: getattr(self, child).state for child in self._flows}
    works_state = {work: getattr(self, work).state for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        # this may have the challenge that ret cannot be pickled, we'll need to handle this
        "calls": self._calls.copy(),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state for child in self._structures},
        "changes": {},
    }
def state_vars(self) -> dict:
    """Like :attr:`state`, but limited to variables — without calls or changes."""
    children_state = {child: getattr(self, child).state_vars for child in self._flows}
    works_state = {work: getattr(self, work).state_vars for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state_vars for child in self._structures},
    }
def state_with_changes(self) -> dict:
    """Like :attr:`state`, but with the recorded ``changes`` included."""
    children_state = {child: getattr(self, child).state_with_changes for child in self._flows}
    works_state = {work: getattr(self, work).state_with_changes for work in self._works}
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        # this may have the challenge that ret cannot be pickled, we'll need to handle this
        "calls": self._calls.copy(),
        "flows": children_state,
        "works": works_state,
        "structures": {child: getattr(self, child).state_with_changes for child in self._structures},
        "changes": self.changes,
    }
def flows(self) -> Dict[str, "LightningFlow"]:
    """Return its children LightningFlow, recursively, keyed by their full name."""
    flows = {}
    for el in sorted(self._flows):
        flow = getattr(self, el)
        flows[flow.name] = flow
        # include grandchildren and below
        flows.update(flow.flows)
    for struct_name in sorted(self._structures):
        flows.update(getattr(self, struct_name).flows)
    return flows
def lightningignore(self) -> Tuple[str, ...]:
    """Programmatic equivalent of the ``.lightningignore`` file."""
    return self._lightningignore
def lightningignore(self, lightningignore: Tuple[str, ...]) -> None:
    """Set the ignore patterns — only allowed before the app is dispatched.

    (Presumably the ``@lightningignore.setter`` counterpart of the property above.)
    """
    if self._backend is not None:
        raise RuntimeError(
            f"Your app has been already dispatched, so modifying the `{self.name}.lightningignore` does not have an"
            " effect"
        )
    self._lightningignore = lightningignore
def works(self, recurse: bool = True) -> List[LightningWork]:
    """Return its :class:`~lightning.app.core.work.LightningWork`.

    Args:
        recurse: Whether to include the works of all child flows and structures.
    """
    works = [getattr(self, el) for el in sorted(self._works)]
    if not recurse:
        return works
    # Idiom: extend with the child collections instead of appending element by element.
    for child_name in sorted(self._flows):
        works.extend(getattr(self, child_name).works(recurse=recurse))
    for struct_name in sorted(self._structures):
        # structures expose ``works`` as an attribute, not a method
        works.extend(getattr(self, struct_name).works)
    return works
def named_works(self, recurse: bool = True) -> List[Tuple[str, LightningWork]]:
    """Return its :class:`~lightning.app.core.work.LightningWork` with their names."""
    return [(w.name, w) for w in self.works(recurse=recurse)]
def set_state(self, provided_state: Dict, recurse: bool = True) -> None:
    """Method to set the state to this LightningFlow, its children and
    :class:`~lightning.app.core.work.LightningWork`.

    Arguments:
        provided_state: The state to be reloaded
        recurse: Whether to apply the state down children.
    """
    for k, v in provided_state["vars"].items():
        # NOTE(review): ``Dict`` here is the typing alias imported at module level, so this
        # effectively checks ``isinstance(v, dict)`` — serialized Drive/CloudCompute payloads.
        if isinstance(v, Dict):
            v = _maybe_create_drive(self.name, v)
        if isinstance(v, Dict):
            v = _maybe_create_cloud_compute(v)
        setattr(self, k, v)
    self._changes = provided_state["changes"]
    self._calls.update(provided_state["calls"])
    if not recurse:
        return
    for child, state in provided_state["flows"].items():
        getattr(self, child).set_state(state)
    for work, state in provided_state["works"].items():
        getattr(self, work).set_state(state)
    for structure, state in provided_state["structures"].items():
        getattr(self, structure).set_state(state)
def stop(self, end_msg: str = "") -> None:
    """Method used to exit the application."""
    if end_msg:
        print(end_msg)
    # The runtime catches this exception to terminate the app gracefully
    raise ExitAppException
def fail(self, end_msg: str = "") -> None:
    """Method used to exit and fail the application."""
    if end_msg:
        print(end_msg)
    raise LightningFlowException
def _exit(self, end_msg: str = "") -> None:
    """Used to exit the application.

    Private method.

    .. deprecated:: 1.9.0
        This function is deprecated and will be removed in 2.0.0. Use :meth:`stop` instead.
    """
    warnings.warn(
        DeprecationWarning(
            "This function is deprecated and will be removed in 2.0.0. Use `LightningFlow.stop` instead."
        )
    )
    return self.stop(end_msg=end_msg)
def _is_state_attribute(name: str) -> bool:
    """Every public attribute is part of the state by default and all protected (prefixed by '_') or private
    (prefixed by '__') attributes are not.

    Exceptions are listed in the `_INTERNAL_STATE_VARS` class variable.
    """
    # NOTE(review): no ``self``/``cls`` parameter — presumably a ``@staticmethod`` originally.
    return name in LightningFlow._INTERNAL_STATE_VARS or not name.startswith("_")
def run(self, *args: Any, **kwargs: Any) -> None:
    """Override with your own logic.

    This is the flow's entry point; the root flow's ``run`` is called repeatedly by the app loop.
    """
    pass
def schedule(
    self, cron_pattern: str, start_time: Optional[datetime] = None, user_key: Optional[str] = None
) -> bool:
    """The schedule method is used to run a part of the flow logic on timely manner.

    .. code-block:: python

        from lightning.app import LightningFlow


        class Flow(LightningFlow):
            def run(self):
                if self.schedule("hourly"):
                    print("run some code every hour")

    Arguments:
        cron_pattern: The cron pattern to provide. Learn more at https://crontab.guru/.
        start_time: The start time of the cron job.
        user_key: Optional key used to improve the caching mechanism.

    A best practice is to avoid running a dynamic flow or work under the self.schedule method.
    Instead, instantiate them within the condition, but run them outside.

    .. code-block:: python

        from lightning.app import LightningFlow
        from lightning.app.structures import List


        class SchedulerDAG(LightningFlow):
            def __init__(self):
                super().__init__()
                self.dags = List()

            def run(self):
                if self.schedule("hourly"):
                    self.dags.append(DAG(...))

                for dag in self.dags:
                    payload = dag.run()
    """
    # Identify this call site: either a user-provided key or filename+lineno of the caller.
    if not user_key:
        frame = cast(FrameType, inspect.currentframe()).f_back
        assert frame is not None
        cache_key = f"{cron_pattern}.{frame.f_code.co_filename}.{frame.f_lineno}"
    else:
        cache_key = user_key
    call_hash = f"{self.schedule.__name__}:{DeepHash(cache_key)[cache_key]}"
    if "scheduling" not in self._calls:
        self._calls["scheduling"] = {}
    entered = call_hash in self._calls["scheduling"]
    # Friendly aliases for the standard cron shortcut expressions
    expr_aliases = {
        "midnight": "@midnight",
        "hourly": "@hourly",
        "daily": "@daily",
        "weekly": "@weekly",
        "monthly": "@monthly",
        "yearly": "@yearly",
        "annually": "@annually",
    }
    if cron_pattern in expr_aliases:
        cron_pattern = expr_aliases[cron_pattern]
    if not entered:
        # First invocation for this call site: register the schedule and fire once immediately.
        if not start_time:
            start_time = datetime.now()
        schedule_metadata = {
            "running": False,
            "cron_pattern": cron_pattern,
            "start_time": str(start_time.isoformat()),
            "name": self.name,
        }
        self._calls["scheduling"][call_hash] = schedule_metadata
        app = _LightningAppRef().get_current()
        if app:
            app._register_schedule(call_hash, schedule_metadata)
        return True
    # Subsequent invocations: the scheduler toggles the "running" flag when the cron fires.
    return self._calls["scheduling"][call_hash]["running"]
def _enable_schedule(self, call_hash: str) -> None:
    """Mark the schedule identified by ``call_hash`` as firing (see :meth:`schedule`)."""
    self._calls["scheduling"][call_hash]["running"] = True
def _disable_running_schedules(self) -> None:
    """Reset every registered schedule to not running (no-op when none were registered)."""
    schedules = self._calls.get("scheduling")
    if schedules is None:
        return
    for entry in schedules.values():
        entry["running"] = False
def configure_layout(self) -> Union[Dict[str, Any], List[Dict[str, Any]], Frontend]:
    """Configure the UI layout of this LightningFlow.

    You can either

    1. Return a single :class:`~lightning.app.frontend.frontend.Frontend` object to serve a user interface
       for this Flow.
    2. Return a single dictionary to expose the UI of a child flow.
    3. Return a list of dictionaries to arrange the children of this flow in one or multiple tabs.

    **Example:** Serve a static directory (with at least a file index.html inside).

    .. code-block:: python

        from lightning.app.frontend import StaticWebFrontend


        class Flow(LightningFlow):
            ...

            def configure_layout(self):
                return StaticWebFrontend("path/to/folder/to/serve")

    **Example:** Serve a streamlit UI (needs the streamlit package to be installed).

    .. code-block:: python

        from lightning.app.frontend import StaticWebFrontend


        class Flow(LightningFlow):
            ...

            def configure_layout(self):
                return StreamlitFrontend(render_fn=my_streamlit_ui)


        def my_streamlit_ui(state):
            # add your streamlit code here!
            import streamlit as st

    **Example:** Arrange the UI of my children in tabs (default UI by Lightning).

    .. code-block:: python

        class Flow(LightningFlow):
            def configure_layout(self):
                return [
                    dict(name="First Tab", content=self.child0),
                    dict(name="Second Tab", content=self.child1),
                    dict(name="Lightning", content="https://lightning.ai"),
                ]

    If you don't implement ``configure_layout``, Lightning will collect all children and display their UI in a tab
    (if they have their own ``configure_layout`` implemented).

    Note:
        This hook gets called at the time of app creation and then again as part of the loop. If desired, the
        returned layout configuration can depend on the state. The only exception are the flows that return a
        :class:`~lightning.app.frontend.frontend.Frontend`. These need to be provided at the time of app creation
        in order for the runtime to start the server.
    """
    # Default: one tab per (recursively collected) child flow.
    return [{"name": name, "content": component} for (name, component) in self.flows.items()]
def experimental_iterate(self, iterable: Iterable, run_once: bool = True, user_key: str = "") -> Generator:
    """This method should always be used with any kind of iterable to ensure its fault tolerant.

    If you want your iterable to always be consumed from scratch, you shouldn't use this method.

    Arguments:
        iterable: Iterable to iterate over. The iterable shouldn't have side effects or be random.
        run_once: Whether to run the entire iteration only once.
            Otherwise, it would restart from the beginning.
        user_key: Key to be used to track the caching mechanism.
    """
    if not isinstance(iterable, Iterable):
        raise TypeError(f"An iterable should be provided to `self.iterate` method. Found {iterable}")
    # TODO: Find a better way. Investigated using __reduce__, but state change invalidate the cache.
    # Identify this call site: either a user-provided key or filename+first lineno of the caller.
    if not user_key:
        frame = cast(FrameType, inspect.currentframe()).f_back
        assert frame is not None
        cache_key = f"{frame.f_code.co_filename}.{frame.f_code.co_firstlineno}"
    else:
        cache_key = user_key
    call_hash = f"{self.experimental_iterate.__name__}:{DeepHash(cache_key)[cache_key]}"
    entered = call_hash in self._calls
    has_started = entered and self._calls[call_hash]["counter"] > 0
    has_finished = entered and self._calls[call_hash]["has_finished"]
    if has_finished:
        if not run_once:
            # restart from scratch on the next pass
            self._calls[call_hash].update({"counter": 0, "has_finished": False})
        else:
            # already fully consumed once; yield nothing
            return range(0)
    if not has_started:
        self._calls[call_hash] = {
            "name": self.experimental_iterate.__name__,
            "call_hash": call_hash,
            "counter": 0,
            "has_finished": False,
        }
    # After a restart, fast-forward past the elements already yielded in a previous run.
    skip_counter = max(self._calls[call_hash]["counter"], 0)
    for counter, value in enumerate(iterable):
        if skip_counter:
            skip_counter -= 1
            continue
        self._calls[call_hash].update({"counter": counter})
        yield value
    self._calls[call_hash].update({"has_finished": True})
def configure_commands(self) -> None:
    """Configure the commands of this LightningFlow.

    Returns a dictionary mapping a command name to a flow method.

    .. code-block:: python

        class Flow(LightningFlow):
            def __init__(self):
                super().__init__()
                self.names = []

            def configure_commands(self):
                return {"my_command_name": self.my_remote_method}

            def my_remote_method(self, name):
                self.names.append(name)

    Once the app is running with the following command:

    .. code-block:: bash

        lightning_app run app app.py

    .. code-block:: bash

        lightning_app my_command_name --args name=my_own_name
    """
    # NOTE(review): the ``-> None`` annotation contradicts the documented return value of
    # overrides (a command mapping) — the base implementation only raises; confirm intent.
    raise NotImplementedError
def configure_api(self) -> None:
    """Configure the API routes of the LightningFlow.

    Returns a list of HttpMethod such as Post or Get.

    .. code-block:: python

        from lightning.app import LightningFlow
        from lightning.app.api import Post

        from pydantic import BaseModel


        class HandlerModel(BaseModel):
            name: str


        class Flow(LightningFlow):
            def __init__(self):
                super().__init__()
                self.names = []

            def handler(self, config: HandlerModel) -> None:
                self.names.append(config.name)

            def configure_api(self):
                return [Post("/v1/api/request", self.handler)]

    Once the app is running, you can access the Swagger UI of the app
    under the ``/docs`` route.
    """
    # NOTE(review): the ``-> None`` annotation contradicts the documented return value of
    # overrides (a list of routes) — the base implementation only raises; confirm intent.
    raise NotImplementedError
def state_dict(self) -> dict:
    """Returns the current flow state but not its children."""
    return {
        "vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
        "calls": self._calls.copy(),
        # children/structures are intentionally left empty — see :attr:`state` for the recursive form
        "changes": {},
        "flows": {},
        "works": {},
        "structures": {},
    }
def load_state_dict(
    self,
    flow_state: Dict[str, Any],
    children_states: Dict[str, Any],
    strict: bool = True,
) -> None:
    """Reloads the state of this flow and its children.

    .. code-block:: python

        class Work(LightningWork):
            def __init__(self):
                super().__init__()
                self.counter = 0

            def run(self):
                self.counter += 1


        class Flow(LightningFlow):
            def run(self):
                # dynamically create a work.
                if not getattr(self, "w", None):
                    self.w = WorkReload()

                self.w.run()

            def load_state_dict(self, flow_state, children_states, strict) -> None:
                # 1: Re-instantiate the dynamic work
                self.w = Work()

                # 2: Make any states modification / migration.
                ...

                # 3: Call the parent ``load_state_dict`` to
                # recursively reload the states.
                super().load_state_dict(
                    flow_state,
                    children_states,
                    strict,
                )

    Arguments:
        flow_state: The state of the current flow.
        children_states: The state of the dynamic children of this flow.
        strict: Whether to raise an exception if a dynamic
            children hasn't been re-created.
    """
    self.set_state(flow_state, recurse=False)
    # keys without a "." are this flow's immediate children
    direct_children_states = {k: v for k, v in children_states.items() if "." not in k}
    for child_name, state in direct_children_states.items():
        child = getattr(self, child_name, None)
        if isinstance(child, LightningFlow):
            # strip this child's prefix and recurse with the remaining (deeper) states
            lower_children_states = {
                k.replace(child_name + ".", ""): v
                for k, v in children_states.items()
                if k.startswith(child_name) and k != child_name
            }
            child.load_state_dict(state, lower_children_states, strict=strict)
        elif isinstance(child, LightningWork):
            child.set_state(state)
        elif strict:
            raise ValueError(f"The component {child_name} wasn't instantiated for the component {self.name}")
The provided code snippet includes necessary dependencies for implementing the `_convert_paths_after_init` function. Write a Python function `def _convert_paths_after_init(root: "LightningFlow")` to solve the following problem:
Converts the path attributes on a component to a dictionary. This is necessary because at the time of instantiating the component, its full affiliation is not known, and Paths that get passed to other components during ``__init__`` are otherwise not able to reference their origin or consumer.
Here is the function:
def _convert_paths_after_init(root: "LightningFlow"):
    """Move every ``Path`` attribute on each component into its ``_paths`` dict.

    At ``__init__`` time a component's full affiliation is not yet known, so
    Paths handed to other components during construction cannot resolve their
    origin or consumer. Serializing them to dictionaries defers that resolution
    until the component tree is fully assembled.
    """
    from lightning.app.core import LightningFlow, LightningWork
    from lightning.app.storage.path import Path

    for node in breadth_first(root, types=(LightningFlow, LightningWork)):
        # Snapshot the attribute names first: we mutate ``__dict__`` below.
        for attr_name in tuple(node.__dict__):
            candidate = getattr(node, attr_name)
            if isinstance(candidate, Path):
                delattr(node, attr_name)
                node._paths[attr_name] = candidate.to_dict()
155,615 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
class CloudCompute:
    """Configure the cloud runtime for a lightning work or flow.

    Arguments:
        name: The name of the hardware to use. A full list of supported options can be found in
            :doc:`/core_api/lightning_work/compute`. If you have a request for more hardware options, please contact
            `onprem@lightning.ai <mailto:onprem@lightning.ai>`_.
        disk_size: The disk size in Gigabytes.
            The value you set here will be allocated to the /home folder.
        idle_timeout: The number of seconds to wait before pausing the compute when the work is running and idle.
            This timeout starts whenever your run() method succeeds (or fails).
            If the timeout is reached, the instance pauses until the next run() call happens.
        shm_size: Shared memory size in MiB, backed by RAM. min 512, max 8192, it will auto update in steps of 512.
            For example 1100 will become 1024. If set to zero (the default) will get the default 64MiB inside docker.
        mounts: External data sources which should be mounted into a work as a filesystem at runtime.
        colocation_group_id: Identifier for groups of works to be colocated in the same datacenter.
            Set this to a string of max. 64 characters and all works with this group id will run in the same datacenter.
            If not set, the works are not guaranteed to be colocated.
        interruptible: Whether to run on a interruptible machine e.g the machine can be stopped
            at any time by the providers. This is also known as spot or preemptible machines.
            Compared to on-demand machines, they tend to be cheaper.
    """

    # NOTE(review): upstream this class is a ``@dataclass`` (``__post_init__``,
    # ``asdict`` usage) and ``from_dict``/``id``/``is_default`` carry
    # ``@classmethod``/``@property`` decorators; the decorators appear stripped
    # in this extract — confirm against the original source.
    name: str = "default"
    disk_size: int = 0
    idle_timeout: Optional[int] = None
    shm_size: Optional[int] = None
    mounts: Optional[Union[Mount, List[Mount]]] = None
    colocation_group_id: Optional[str] = None
    interruptible: bool = False
    _internal_id: Optional[str] = None

    def __post_init__(self) -> None:
        """Normalize and validate the configuration right after construction."""
        _verify_mount_root_dirs_are_unique(self.mounts)
        self.name = self.name.lower()
        # GPU machines default to a 1024 MiB shared-memory segment; CPU
        # machines keep 0, which means the docker default applies.
        if self.shm_size is None:
            if "gpu" in self.name:
                self.shm_size = 1024
            else:
                self.shm_size = 0
        # Interruptible (spot/preemptible) machines are feature-gated and
        # only offered for GPU hardware.
        if self.interruptible:
            if not enable_interruptible_works():
                raise ValueError("CloudCompute with `interruptible=True` isn't supported yet.")
            if "gpu" not in self.name:
                raise ValueError("CloudCompute `interruptible=True` is supported only with GPU.")
        # FIXME: Clean the mess on the platform side
        # "default" and "cpu" are aliases normalized to "cpu-small".
        if self.name == "default" or self.name == "cpu":
            self.name = "cpu-small"
            self._internal_id = "default"
        # TODO: Remove from the platform first.
        # Legacy alias kept for the platform; mirrors ``interruptible``.
        self.preemptible = self.interruptible
        # All `default` CloudCompute are identified in the same way.
        if self._internal_id is None:
            self._internal_id = self._generate_id()
        if self.colocation_group_id is not None and (
            not isinstance(self.colocation_group_id, str)
            or (isinstance(self.colocation_group_id, str) and len(self.colocation_group_id) > 64)
        ):
            raise ValueError("colocation_group_id can only be a string of maximum 64 characters.")

    def to_dict(self) -> dict:
        """Serialize this config to a dict, tagged with the cloud-compute type identifier."""
        _verify_mount_root_dirs_are_unique(self.mounts)
        return {"type": __CLOUD_COMPUTE_IDENTIFIER__, **asdict(self)}

    def from_dict(cls, d: dict) -> "CloudCompute":
        """Re-create a ``CloudCompute`` from its ``to_dict`` representation.

        Mounts are rebuilt from their dict form; a bad ``mounts`` value raises
        ``TypeError``.
        """
        assert d.pop("type") == __CLOUD_COMPUTE_IDENTIFIER__
        mounts = d.pop("mounts", None)
        if mounts is None:
            pass
        elif isinstance(mounts, dict):
            d["mounts"] = Mount(**mounts)
        elif isinstance(mounts, (list)):
            d["mounts"] = []
            for mount in mounts:
                d["mounts"].append(Mount(**mount))
        else:
            raise TypeError(
                f"mounts argument must be one of [None, Mount, List[Mount]], "
                f"received {mounts} of type {type(mounts)}"
            )
        _verify_mount_root_dirs_are_unique(d.get("mounts"))
        return cls(**d)

    def id(self) -> Optional[str]:
        # Stable identifier; all "default" computes share the id "default".
        return self._internal_id

    def is_default(self) -> bool:
        # "default"/"cpu" are normalized to "cpu-small" in ``__post_init__``.
        return self.name in ("default", "cpu-small")

    def _generate_id(self):
        # Non-default computes get a short random hex id.
        return "default" if self.name == "default" else uuid4().hex[:7]

    def clone(self):
        """Return a copy of this CloudCompute with a freshly generated internal id."""
        new_dict = self.to_dict()
        new_dict["_internal_id"] = self._generate_id()
        return self.from_dict(new_dict)
class Path(PathlibPath):
    """A drop-in replacement for :class:`pathlib.Path` for all paths in Lightning.

    The Lightning Path works exactly the same as :class:`pathlib.Path` but it also remembers in which LightningWork
    it was created. If the Path gets passed to a different LightningWork, the file or folder can then be easily
    accessed no matter where it is located in the other Work's filesystem.

    Args:
        *args: Accepts the same arguments as in :class:`pathlib.Path`
        **kwargs: Accepts the same keyword arguments as in :class:`pathlib.Path`
    """

    # NOTE(review): in the upstream source ``_from_parts``/``from_dict`` are
    # ``@classmethod`` and ``origin_name``/``consumer_name``/``hash``/
    # ``parents``/``parent`` are ``@property``; the decorators appear stripped
    # in this extract — confirm before relying on call style.

    def _from_parts(cls, args: Any, **__unused) -> "Path":
        """This gets called from the super class in ``pathlib.Path.__new__``.

        The Lightning Path overrides this to validate the instantiation in the case parts are passed in individually. In
        such a case we need to validate that all parts have the same `origin` and if not, an error is raised.
        """
        # Translate the ``lit://`` scheme into a path under the storage root.
        if args and isinstance(args[0], str) and args[0].startswith("lit://"):
            parts = list(args)
            parts[0] = parts[0][len("lit://") :]
            args = (_storage_root_dir(), *parts)
        # ``pathlib``'s private ``_from_parts`` signature changed in Python 3.10.
        if (sys.version_info.major, sys.version_info.minor) < (3, 10):
            __unused.setdefault("init", True)
            new_path = super()._from_parts(args, **__unused)
        else:
            new_path = super()._from_parts(args)
        new_path._init_attributes()  # we use this instead of defining a __init__() method
        paths_from_parts = [part for part in args if isinstance(part, Path)]
        if not paths_from_parts:
            return new_path
        top_path = paths_from_parts[0]
        origins = [part._origin for part in paths_from_parts]
        # Every Lightning Path among the parts must agree on the origin Work.
        if not all(origins[0] == origin or origin is None for origin in origins):
            raise TypeError(
                "Tried to instantiate a Lightning Path from multiple other Paths that originate from different"
                " LightningWork."
            )
        new_path._copy_properties_from(top_path)
        return new_path

    def _init_attributes(self):
        # Initialize the Lightning-specific state; called from ``_from_parts``
        # because ``pathlib.Path`` construction bypasses ``__init__``.
        self._name: Optional[str] = None
        # the origin is the work that created this Path and wants to expose file(s)
        self._origin: Optional[Union["LightningWork", str]] = None
        # the consumer is the Work that needs access to the file(s) from the consumer
        self._consumer: Optional[Union["LightningWork", str]] = None
        self._metadata = {}
        # request queue: used to transfer message to storage orchestrator
        self._request_queue: Optional[BaseQueue] = None
        # response queue: used to receive status message from storage orchestrator
        self._response_queue: Optional[BaseQueue] = None

    def origin_name(self) -> str:
        """The name of the LightningWork where this path was first created.

        Attaching a Path to a LightningWork will automatically make it the `origin`.
        """
        from lightning.app.core.work import LightningWork

        return self._origin.name if isinstance(self._origin, LightningWork) else self._origin

    def consumer_name(self) -> str:
        """The name of the LightningWork where this path is being accessed.

        By default, this is the same as the :attr:`origin_name`.
        """
        from lightning.app.core.work import LightningWork

        return self._consumer.name if isinstance(self._consumer, LightningWork) else self._consumer

    def hash(self) -> Optional[str]:
        """The hash of this Path uniquely identifies the file path and the associated origin Work.

        Returns ``None`` if the origin is not defined, i.e., this Path did not yet get attached to a LightningWork.
        """
        if self._origin is None:
            return None
        # Hash is derived from origin name + path string, so the same path on
        # different Works hashes differently.
        contents = f"{self.origin_name}/{self}"
        return hashlib.sha1(contents.encode("utf-8")).hexdigest()

    def parents(self) -> Sequence["Path"]:
        # Propagate origin/consumer/queue state to every parent Path.
        parents: List["Path"] = list(super().parents)
        for parent in parents:
            parent._copy_properties_from(self)
        return parents

    def parent(self) -> "Path":
        parent: Path = super().parent
        parent._copy_properties_from(self)
        return parent

    def exists(self) -> bool:
        """Check if the path exists locally or remotely.

        If the path exists locally, this method immediately returns ``True``, otherwise it will make a RPC call
        to the attached origin Work and check if the path exists remotely.

        If you strictly want to check local existence only, use :meth:`exists_local` instead. If you strictly want
        to check existence on the remote (regardless of whether the file exists locally or not), use
        :meth:`exists_remote`.
        """
        return self.exists_local() or (self._origin and self.exists_remote())

    def exists_local(self) -> bool:
        """Check if the path exists locally."""
        return super().exists()

    def exists_remote(self) -> bool:
        """Check if the path exists remotely on the attached orgin Work.

        Raises:
            RuntimeError: If the path is not attached to any Work (origin undefined).
        """
        # Fail early if we need to check the remote but an origin is not defined
        if not self._origin or self._request_queue is None or self._response_queue is None:
            raise RuntimeError(
                f"Trying to check if the file {self} exists, but the path is not attached to a LightningWork."
                f" Set it as an attribute to a LightningWork or pass it to the `run()` method."
            )
        # 1. Send message to orchestrator through queue that with a request for a path existence check
        request = _ExistsRequest(source=self.origin_name, path=str(self), name=self._name, hash=self.hash)
        self._request_queue.put(request)
        # 2. Wait for the response to come back
        response: _ExistsResponse = self._response_queue.get()  # blocking
        return response.exists

    def get(self, overwrite: bool = False) -> None:
        """Transfer the file/folder from the origin Work to the local filesystem.

        Raises:
            RuntimeError: If called from a flow context or the path is not attached to a Work/App.
            FileExistsError: If the destination exists locally and ``overwrite`` is ``False``.
        """
        if _is_flow_context():
            raise RuntimeError("`Path.get()` can only be called from within the `run()` method of LightningWork.")
        if self._request_queue is None or self._response_queue is None:
            raise RuntimeError(
                f"Trying to get the file {self}, but the path is not attached to a LightningApp."
                f" Are you trying to get the file from within `__init__`?"
            )
        if self._origin is None:
            raise RuntimeError(
                f"Trying to get the file {self}, but the path is not attached to a LightningWork. Set it as an"
                f" attribute to a LightningWork or pass it to the `run()` method."
            )
        if self.exists_local() and not overwrite:
            raise FileExistsError(
                f"The file or folder {self} exists locally. Pass `overwrite=True` if you wish to replace it"
                f" with the new contents."
            )
        # 1. Send message to orchestrator through queue with details of the transfer
        # the source is the name of the work that owns the file that we request
        # the destination is determined by the queue, since each work has a dedicated send and recv queue
        request = _GetRequest(source=self.origin_name, path=str(self), hash=self.hash, name=self._name)
        self._request_queue.put(request)
        # 2. Wait for the transfer to finish
        response: _GetResponse = self._response_queue.get()  # blocking
        self._validate_get_response(response)
        fs = _filesystem()
        # 3. Wait until the file appears in shared storage
        # Polls both existence and size to avoid reading a partially-written file.
        while not fs.exists(response.path) or fs.info(response.path)["size"] != response.size:
            sleep(REMOTE_STORAGE_WAIT)
        if self.exists_local() and self.is_dir():
            # Delete the directory, otherwise we can't overwrite it
            shutil.rmtree(self)
        # 4. Copy the file from the shared storage to the destination on the local filesystem
        if fs.isdir(response.path):
            if isinstance(fs, LocalFileSystem):
                shutil.copytree(response.path, self.resolve())
            else:
                glob = f"{str(response.path)}/**"
                _logger.debug(f"Attempting to copy {glob} -> {str(self.absolute())}")
                # NOTE(review): ``recursive=False`` combined with a ``**`` glob
                # looks surprising — confirm against the fsspec docs.
                fs.get(glob, str(self.absolute()), recursive=False)
        else:
            _logger.debug(f"Attempting to copy {str(response.path)} -> {str(self.absolute())}")
            fs.get(str(response.path), str(self.absolute()), recursive=False)

    def to_dict(self) -> dict:
        """Serialize this Path to a dictionary."""
        return {
            "path": str(self),
            "origin_name": self.origin_name,
            "consumer_name": self.consumer_name,
            "metadata": self._metadata,
        }

    def from_dict(cls, content: dict) -> "Path":
        """Instantiate a Path from a dictionary."""
        path = cls(content["path"])
        path._origin = content["origin_name"]
        path._consumer = content["consumer_name"]
        path._metadata = content["metadata"]
        return path

    def _validate_get_response(self, response: "_GetResponse") -> None:
        # Guard against a response meant for a different Path/request.
        if response.source != self._origin or response.hash != self.hash:
            raise RuntimeError(
                f"Tried to get the file {self} but received a response for a request it did not send. The response"
                f" contents are: {response}"
            )
        if response.exception is not None:
            raise RuntimeError(
                f"An exception was raised while trying to transfer the contents at {response.path}"
                f" from Work {response.source} to {response.destination}. See the full stacktrace above."
            ) from response.exception

    def _attach_work(self, work: "LightningWork") -> None:
        """Attach a LightningWork to this Path.

        The first work to be attached becomes the `origin`, i.e., the Work that is meant to expose the file to other
        Work. Attaching a Work to a Path that already has an `origin` Work will make it a `consumer`. A consumer Work
        is a work that can access the file only by first transferring it via :meth:`transfer`.

        Args:
            work: LightningWork to be attached to this Path.
        """
        if self._origin is None:
            # Can become an owner only if there is not already one
            self._origin = work
        self._consumer = work

    def _attach_queues(self, request_queue: BaseQueue, response_queue: BaseQueue) -> None:
        """Attaches the queues for communication with the Storage Orchestrator."""
        self._request_queue = request_queue
        self._response_queue = response_queue

    def _sanitize(self) -> None:
        """Sanitize this Path so that it can be deep-copied."""
        # Replace live Work references with plain names and drop the queues,
        # which are not picklable/deep-copyable.
        self._origin = self.origin_name
        self._consumer = self.consumer_name
        self._request_queue = None
        self._response_queue = None

    def _copy_properties_from(self, other: "Path") -> None:
        # Share Lightning-specific state with a Path derived from ``other``.
        self._origin = other._origin
        self._consumer = other._consumer
        self._metadata = other._metadata
        self._request_queue = other._request_queue
        self._response_queue = other._response_queue

    # The following overrides ensure that any derived path keeps the
    # origin/consumer/queue state of the path it was derived from.

    def with_name(self, name: str) -> "Path":
        path: Path = super().with_name(name)
        path._copy_properties_from(self)
        return path

    def with_stem(self, stem: str) -> "Path":
        path: Path = super().with_stem(stem)
        path._copy_properties_from(self)
        return path

    def with_suffix(self, suffix: str) -> "Path":
        path: Path = super().with_suffix(suffix)
        path._copy_properties_from(self)
        return path

    def relative_to(self, *other) -> "Path":
        path: Path = super().relative_to(*other)
        path._copy_properties_from(self)
        return path

    def __truediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
        path: Path = super().__truediv__(other)
        path._copy_properties_from(self)
        return path

    def __rtruediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
        path: Path = super().__rtruediv__(other)
        path._copy_properties_from(self)
        return path

    def __reduce__(self):
        # Pickle via the dict form so queue handles are never serialized.
        return Path.from_dict, (self.to_dict(),)

    def __json__(self) -> dict:
        """Converts the Path to a json-serializable dict object."""
        return self.to_dict()
def _handle_exists_request(work: "LightningWork", request: _ExistsRequest) -> _ExistsResponse:
    """Answer a storage-orchestrator existence check with a local filesystem lookup.

    ``work`` is part of the handler signature but is not consulted here; the
    check only needs the requested path.
    """
    reply_fields = {
        "source": request.source,
        "name": request.name,
        "hash": request.hash,
        "path": request.path,
        "destination": request.destination,
        "exists": os.path.exists(request.path),
    }
    return _ExistsResponse(**reply_fields)
def _handle_get_request(work: "LightningWork", request: _GetRequest) -> _GetResponse:
    """Copy the requested file/folder into shared storage and report the outcome.

    Any copy failure is captured on ``response.exception`` instead of being
    raised, so the orchestrator can forward it to the requesting Work.
    """
    from lightning.app.storage.copier import _copy_files

    src = pathlib.Path(request.path)
    dst = _shared_storage_path() / request.hash
    response = _GetResponse(
        source=request.source,
        name=request.name,
        path=str(dst),
        hash=request.hash,
        size=src.stat().st_size,
        destination=request.destination,
    )
    try:
        _copy_files(src, dst)
        _logger.debug(f"All files copied from {request.path} to {response.path}.")
    except Exception as ex:
        response.exception = ex
    return response
class _BasePayload(ABC):
    """Base class for payload objects whose values are moved between Works via the storage orchestrator.

    NOTE(review): in the upstream source ``name``/``value``/``hash``/
    ``origin_name``/``consumer_name``/``_path`` are ``@property``, ``save``/
    ``load`` are abstract, and ``from_dict`` is a ``@classmethod``; the
    decorators appear stripped in this extract — confirm.
    """

    def __init__(self, value: Any) -> None:
        # The actual object the payload wraps.
        self._value = value
        # the attribute name given to the payload
        self._name: Optional[str] = None
        # the origin is the work that created this Path and wants to expose file(s)
        self._origin: Optional[Union["LightningWork", str]] = None
        # the consumer is the Work that needs access to the file(s) from the consumer
        self._consumer: Optional[Union["LightningWork", str]] = None
        self._metadata = {}
        # request queue: used to transfer message to storage orchestrator
        self._request_queue: Optional[BaseQueue] = None
        # response queue: used to receive status message from storage orchestrator
        self._response_queue: Optional[BaseQueue] = None

    def name(self) -> Optional[str]:
        # The attribute name this payload is stored under on its Work.
        return self._name

    def value(self) -> Optional[Any]:
        """The real object that this payload holds."""
        return self._value

    def hash(self) -> Optional[str]:
        """The hash of this Payload uniquely identifies the payload and the associated origin Work.

        Returns ``None`` if the origin is not defined, i.e., this Path did not yet get attached to a LightningWork.
        """
        if self._origin is None:
            return None
        # Unlike ``Path.hash``, the payload hash also includes the consumer name.
        contents = f"{self.origin_name}/{self.consumer_name}/{self.name}"
        return hashlib.sha1(contents.encode("utf-8")).hexdigest()

    def origin_name(self) -> str:
        """The name of the LightningWork where this payload was first created.

        Attaching a Payload to a LightningWork will automatically make it the `origin`.
        """
        from lightning.app.core.work import LightningWork

        return self._origin.name if isinstance(self._origin, LightningWork) else self._origin

    def consumer_name(self) -> str:
        """The name of the LightningWork where this payload is being accessed.

        By default, this is the same as the :attr:`origin_name`.
        """
        from lightning.app.core.work import LightningWork

        return self._consumer.name if isinstance(self._consumer, LightningWork) else self._consumer

    def _path(self) -> Optional[Path]:
        """Path to the file that the payload value gets serialized to."""
        if not self._name:
            return None
        return Path("lit://", self._name)

    def save(self, obj: Any, path: str) -> None:
        """Override this method with your own saving logic."""

    def load(self, path: str) -> Any:
        """Override this method with your own loading logic."""

    def _attach_work(self, work: "LightningWork") -> None:
        """Attach a LightningWork to this PayLoad.

        Args:
            work: LightningWork to be attached to this Payload.
        """
        if self._origin is None:
            # Can become an owner only if there is not already one
            self._origin = work.name
        self._consumer = work.name

    def _attach_queues(self, request_queue: BaseQueue, response_queue: BaseQueue) -> None:
        """Attaches the queues for communication with the Storage Orchestrator."""
        self._request_queue = request_queue
        self._response_queue = response_queue

    def _sanitize(self) -> None:
        """Sanitize this Payload so that it can be deep-copied."""
        # Replace Work references with names and drop the non-copyable queues.
        self._origin = self.origin_name
        self._consumer = self.consumer_name
        self._request_queue = None
        self._response_queue = None

    def exists_remote(self):
        """Check if the payload exists remotely on the attached orgin Work.

        Raises:
            RuntimeError: If the payload is not attached to any Work (origin undefined).
        """
        # Fail early if we need to check the remote but an origin is not defined
        if not self._origin or self._request_queue is None or self._response_queue is None:
            raise RuntimeError(
                f"Trying to check if the payload {self} exists, but the payload is not attached to a LightningWork."
                f" Set it as an attribute to a LightningWork or pass it to the `run()` method."
            )
        # 1. Send message to orchestrator through queue that with a request for a path existence check
        request = _ExistsRequest(source=self.origin_name, name=self._name, path=str(self._path), hash=self.hash)
        self._request_queue.put(request)
        # 2. Wait for the response to come back
        response: _ExistsResponse = self._response_queue.get()  # blocking
        return response.exists

    def get(self) -> Any:
        """Transfer the payload from the origin Work, deserialize it, and return its value.

        Raises:
            RuntimeError: If called from a flow context or the payload is not attached to a Work/App.
        """
        if _is_flow_context():
            raise RuntimeError("`Payload.get()` can only be called from within the `run()` method of LightningWork.")
        if self._request_queue is None or self._response_queue is None:
            raise RuntimeError(
                f"Trying to get the file {self}, but the payload is not attached to a LightningApp."
                f" Are you trying to get the file from within `__init__`?"
            )
        if self._origin is None:
            raise RuntimeError(
                f"Trying to get the file {self}, but the payload is not attached to a LightningWork. Set it as an"
                f" attribute to a LightningWork or pass it to the `run()` method."
            )
        # 1. Send message to orchestrator through queue with details of the transfer
        # the source is the name of the work that owns the file that we request
        # the destination is determined by the queue, since each work has a dedicated send and recv queue
        request = _GetRequest(source=self.origin_name, name=self._name, path=str(self._path), hash=self.hash)
        self._request_queue.put(request)
        # 2. Wait for the transfer to finish
        response: _GetResponse = self._response_queue.get()  # blocking
        self._validate_get_response(response)
        fs = _filesystem()
        # 3. Wait until the file appears in shared storage
        # Polls existence and size to avoid reading a partially-written file.
        while not fs.exists(response.path) or fs.info(response.path)["size"] != response.size:
            sleep(REMOTE_STORAGE_WAIT)
        # 4. Copy the file from the shared storage to the destination on the local filesystem
        local_path = self._path
        _logger.debug(f"Attempting to copy {str(response.path)} -> {str(local_path)}")
        fs.get(str(response.path), str(local_path), recursive=False)
        # Ensure the file is properly written
        sleep(0.5)
        self._value = self.load(local_path)
        return self._value

    def _validate_get_response(self, response: "_GetResponse") -> None:
        # Guard against a response meant for a different payload/request.
        if response.source != self._origin or response.hash != self.hash:
            raise RuntimeError(
                f"Tried to get the file {self} but received a response for a request it did not send. The response"
                f" contents are: {response}"
            )
        if response.exception is not None:
            raise RuntimeError(
                f"An exception was raised while trying to transfer the contents at {response.path}"
                f" from Work {response.source} to {response.destination}. See the full stacktrace above."
            ) from response.exception

    def to_dict(self) -> dict:
        """Serialize this Path to a dictionary."""
        return {
            "name": self.name,
            "origin_name": self.origin_name,
            "consumer_name": self.consumer_name,
            "metadata": self._metadata,
        }

    def from_dict(cls, content: dict) -> "_BasePayload":
        """Instantiate a Payload from a dictionary."""
        payload = cls(None)
        payload._name = content["name"]
        payload._origin = content["origin_name"]
        payload._consumer = content["consumer_name"]
        payload._metadata = content["metadata"]
        return payload
def _handle_exists_request(work: "LightningWork", request: _ExistsRequest) -> _ExistsResponse:
    """Report whether the requested payload attribute is currently set on ``work``."""
    payload_is_set = getattr(work, request.name, None) is not None
    return _ExistsResponse(
        source=request.source,
        path=request.path,
        name=request.name,
        destination=request.destination,
        hash=request.hash,
        exists=payload_is_set,
    )
def _handle_get_request(work: "LightningWork", request: _GetRequest) -> _GetResponse:
    """Serialize the named payload on ``work``, copy it to shared storage, and report the outcome.

    Any failure during save/copy is captured on ``response.exception`` rather
    than raised, so the orchestrator can forward it to the requesting Work.
    """
    from lightning.app.storage.copier import _copy_files

    src = pathlib.Path(request.path)
    dst = _shared_storage_path() / request.hash
    response = _GetResponse(
        source=request.source,
        name=request.name,
        path=str(dst),
        hash=request.hash,
        destination=request.destination,
    )
    try:
        # Serialize the payload to disk first, then ship the file.
        payload = getattr(work, request.name)
        payload.save(payload.value, src)
        response.size = src.stat().st_size
        _copy_files(src, dst)
        _logger.debug(f"All files copied from {request.path} to {response.path}.")
    except Exception as ex:
        response.exception = ex
    return response
The provided code snippet includes necessary dependencies for implementing the `_sanitize_state` function. Write a Python function `def _sanitize_state(state: Dict[str, Any]) -> Dict[str, Any]` to solve the following problem:
Utility function to sanitize the state of a component. Sanitization enables the state to be deep-copied and hashed.
Here is the function:
def _sanitize_state(state: Dict[str, Any]) -> Dict[str, Any]:
    """Return ``state`` with storage/compute objects replaced by copy- and hash-safe forms.

    ``Path`` objects are cloned and detached from their queues, payloads are
    round-tripped through their dict representation, and ``Drive`` /
    ``CloudCompute`` objects are converted to plain dictionaries.
    """
    from lightning.app.storage import Drive, Path
    from lightning.app.storage.payload import _BasePayload

    def _detached_path(path: Path) -> Path:
        clone = Path(path)
        clone._sanitize()
        return clone

    def _payload_copy(payload: _BasePayload):
        return type(payload).from_dict(content=payload.to_dict())

    # Apply each converter in turn over the (possibly nested) state collection.
    converters = (
        (Path, _detached_path),
        (_BasePayload, _payload_copy),
        (Drive, lambda drive: drive.to_dict()),
        (CloudCompute, lambda cloud_compute: cloud_compute.to_dict()),
    )
    for dtype, convert in converters:
        state = apply_to_collection(state, dtype=dtype, function=convert)
    return state
155,616 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
class Path(PathlibPath):
"""A drop-in replacement for :class:`pathlib.Path` for all paths in Lightning.
The Lightning Path works exactly the same as :class:`pathlib.Path` but it also remembers in which LightningWork
it was created. If the Path gets passed to a different LightningWork, the file or folder can then be easily
accessed no matter where it is located in the other Work's filesystem.
Args:
*args: Accepts the same arguments as in :class:`pathlib.Path`
**kwargs: Accepts the same keyword arguments as in :class:`pathlib.Path`
"""
def _from_parts(cls, args: Any, **__unused) -> "Path":
"""This gets called from the super class in ``pathlib.Path.__new__``.
The Lightning Path overrides this to validate the instantiation in the case parts are passed in individually. In
such a case we need to validate that all parts have the same `origin` and if not, an error is raised.
"""
if args and isinstance(args[0], str) and args[0].startswith("lit://"):
parts = list(args)
parts[0] = parts[0][len("lit://") :]
args = (_storage_root_dir(), *parts)
if (sys.version_info.major, sys.version_info.minor) < (3, 10):
__unused.setdefault("init", True)
new_path = super()._from_parts(args, **__unused)
else:
new_path = super()._from_parts(args)
new_path._init_attributes() # we use this instead of defining a __init__() method
paths_from_parts = [part for part in args if isinstance(part, Path)]
if not paths_from_parts:
return new_path
top_path = paths_from_parts[0]
origins = [part._origin for part in paths_from_parts]
if not all(origins[0] == origin or origin is None for origin in origins):
raise TypeError(
"Tried to instantiate a Lightning Path from multiple other Paths that originate from different"
" LightningWork."
)
new_path._copy_properties_from(top_path)
return new_path
def _init_attributes(self):
self._name: Optional[str] = None
# the origin is the work that created this Path and wants to expose file(s)
self._origin: Optional[Union["LightningWork", str]] = None
# the consumer is the Work that needs access to the file(s) from the consumer
self._consumer: Optional[Union["LightningWork", str]] = None
self._metadata = {}
# request queue: used to transfer message to storage orchestrator
self._request_queue: Optional[BaseQueue] = None
# response queue: used to receive status message from storage orchestrator
self._response_queue: Optional[BaseQueue] = None
def origin_name(self) -> str:
"""The name of the LightningWork where this path was first created.
Attaching a Path to a LightningWork will automatically make it the `origin`.
"""
from lightning.app.core.work import LightningWork
return self._origin.name if isinstance(self._origin, LightningWork) else self._origin
def consumer_name(self) -> str:
"""The name of the LightningWork where this path is being accessed.
By default, this is the same as the :attr:`origin_name`.
"""
from lightning.app.core.work import LightningWork
return self._consumer.name if isinstance(self._consumer, LightningWork) else self._consumer
def hash(self) -> Optional[str]:
"""The hash of this Path uniquely identifies the file path and the associated origin Work.
Returns ``None`` if the origin is not defined, i.e., this Path did not yet get attached to a LightningWork.
"""
if self._origin is None:
return None
contents = f"{self.origin_name}/{self}"
return hashlib.sha1(contents.encode("utf-8")).hexdigest()
def parents(self) -> Sequence["Path"]:
parents: List["Path"] = list(super().parents)
for parent in parents:
parent._copy_properties_from(self)
return parents
def parent(self) -> "Path":
parent: Path = super().parent
parent._copy_properties_from(self)
return parent
def exists(self) -> bool:
"""Check if the path exists locally or remotely.
If the path exists locally, this method immediately returns ``True``, otherwise it will make a RPC call
to the attached origin Work and check if the path exists remotely.
If you strictly want to check local existence only, use :meth:`exists_local` instead. If you strictly want
to check existence on the remote (regardless of whether the file exists locally or not), use
:meth:`exists_remote`.
"""
return self.exists_local() or (self._origin and self.exists_remote())
def exists_local(self) -> bool:
"""Check if the path exists locally."""
return super().exists()
def exists_remote(self) -> bool:
"""Check if the path exists remotely on the attached orgin Work.
Raises:
RuntimeError: If the path is not attached to any Work (origin undefined).
"""
# Fail early if we need to check the remote but an origin is not defined
if not self._origin or self._request_queue is None or self._response_queue is None:
raise RuntimeError(
f"Trying to check if the file {self} exists, but the path is not attached to a LightningWork."
f" Set it as an attribute to a LightningWork or pass it to the `run()` method."
)
# 1. Send message to orchestrator through queue that with a request for a path existence check
request = _ExistsRequest(source=self.origin_name, path=str(self), name=self._name, hash=self.hash)
self._request_queue.put(request)
# 2. Wait for the response to come back
response: _ExistsResponse = self._response_queue.get() # blocking
return response.exists
def get(self, overwrite: bool = False) -> None:
if _is_flow_context():
raise RuntimeError("`Path.get()` can only be called from within the `run()` method of LightningWork.")
if self._request_queue is None or self._response_queue is None:
raise RuntimeError(
f"Trying to get the file {self}, but the path is not attached to a LightningApp."
f" Are you trying to get the file from within `__init__`?"
)
if self._origin is None:
raise RuntimeError(
f"Trying to get the file {self}, but the path is not attached to a LightningWork. Set it as an"
f" attribute to a LightningWork or pass it to the `run()` method."
)
if self.exists_local() and not overwrite:
raise FileExistsError(
f"The file or folder {self} exists locally. Pass `overwrite=True` if you wish to replace it"
f" with the new contents."
)
# 1. Send message to orchestrator through queue with details of the transfer
# the source is the name of the work that owns the file that we request
# the destination is determined by the queue, since each work has a dedicated send and recv queue
request = _GetRequest(source=self.origin_name, path=str(self), hash=self.hash, name=self._name)
self._request_queue.put(request)
# 2. Wait for the transfer to finish
response: _GetResponse = self._response_queue.get() # blocking
self._validate_get_response(response)
fs = _filesystem()
# 3. Wait until the file appears in shared storage
while not fs.exists(response.path) or fs.info(response.path)["size"] != response.size:
sleep(REMOTE_STORAGE_WAIT)
if self.exists_local() and self.is_dir():
# Delete the directory, otherwise we can't overwrite it
shutil.rmtree(self)
# 4. Copy the file from the shared storage to the destination on the local filesystem
if fs.isdir(response.path):
if isinstance(fs, LocalFileSystem):
shutil.copytree(response.path, self.resolve())
else:
glob = f"{str(response.path)}/**"
_logger.debug(f"Attempting to copy {glob} -> {str(self.absolute())}")
fs.get(glob, str(self.absolute()), recursive=False)
else:
_logger.debug(f"Attempting to copy {str(response.path)} -> {str(self.absolute())}")
fs.get(str(response.path), str(self.absolute()), recursive=False)
def to_dict(self) -> dict:
"""Serialize this Path to a dictionary."""
return {
"path": str(self),
"origin_name": self.origin_name,
"consumer_name": self.consumer_name,
"metadata": self._metadata,
}
def from_dict(cls, content: dict) -> "Path":
"""Instantiate a Path from a dictionary."""
path = cls(content["path"])
path._origin = content["origin_name"]
path._consumer = content["consumer_name"]
path._metadata = content["metadata"]
return path
def _validate_get_response(self, response: "_GetResponse") -> None:
if response.source != self._origin or response.hash != self.hash:
raise RuntimeError(
f"Tried to get the file {self} but received a response for a request it did not send. The response"
f" contents are: {response}"
)
if response.exception is not None:
raise RuntimeError(
f"An exception was raised while trying to transfer the contents at {response.path}"
f" from Work {response.source} to {response.destination}. See the full stacktrace above."
) from response.exception
def _attach_work(self, work: "LightningWork") -> None:
"""Attach a LightningWork to this Path.
The first work to be attached becomes the `origin`, i.e., the Work that is meant to expose the file to other
Work. Attaching a Work to a Path that already has an `origin` Work will make it a `consumer`. A consumer Work
is a work that can access the file only by first transferring it via :meth:`transfer`.
Args:
work: LightningWork to be attached to this Path.
"""
if self._origin is None:
# Can become an owner only if there is not already one
self._origin = work
self._consumer = work
def _attach_queues(self, request_queue: BaseQueue, response_queue: BaseQueue) -> None:
"""Attaches the queues for communication with the Storage Orchestrator."""
self._request_queue = request_queue
self._response_queue = response_queue
def _sanitize(self) -> None:
"""Sanitize this Path so that it can be deep-copied."""
self._origin = self.origin_name
self._consumer = self.consumer_name
self._request_queue = None
self._response_queue = None
def _copy_properties_from(self, other: "Path") -> None:
self._origin = other._origin
self._consumer = other._consumer
self._metadata = other._metadata
self._request_queue = other._request_queue
self._response_queue = other._response_queue
def with_name(self, name: str) -> "Path":
path: Path = super().with_name(name)
path._copy_properties_from(self)
return path
def with_stem(self, stem: str) -> "Path":
path: Path = super().with_stem(stem)
path._copy_properties_from(self)
return path
def with_suffix(self, suffix: str) -> "Path":
path: Path = super().with_suffix(suffix)
path._copy_properties_from(self)
return path
def relative_to(self, *other) -> "Path":
path: Path = super().relative_to(*other)
path._copy_properties_from(self)
return path
def __truediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
path: Path = super().__truediv__(other)
path._copy_properties_from(self)
return path
def __rtruediv__(self, other: Union["Path", PathlibPath, str]) -> "Path":
path: Path = super().__rtruediv__(other)
path._copy_properties_from(self)
return path
def __reduce__(self):
return Path.from_dict, (self.to_dict(),)
def __json__(self) -> dict:
"""Converts the Path to a json-serializable dict object."""
return self.to_dict()
def _handle_exists_request(work: "LightningWork", request: _ExistsRequest) -> _ExistsResponse:
return _ExistsResponse(
source=request.source,
name=request.name,
hash=request.hash,
path=request.path,
destination=request.destination,
exists=os.path.exists(request.path),
)
def _handle_get_request(work: "LightningWork", request: _GetRequest) -> _GetResponse:
from lightning.app.storage.copier import _copy_files
source_path = pathlib.Path(request.path)
destination_path = _shared_storage_path() / request.hash
response = _GetResponse(
source=request.source,
name=request.name,
path=str(destination_path),
hash=request.hash,
size=source_path.stat().st_size,
destination=request.destination,
)
try:
_copy_files(source_path, destination_path)
_logger.debug(f"All files copied from {request.path} to {response.path}.")
except Exception as ex:
response.exception = ex
return response
class _BasePayload(ABC):
def __init__(self, value: Any) -> None:
self._value = value
# the attribute name given to the payload
self._name: Optional[str] = None
# the origin is the work that created this Path and wants to expose file(s)
self._origin: Optional[Union["LightningWork", str]] = None
# the consumer is the Work that needs access to the file(s) from the consumer
self._consumer: Optional[Union["LightningWork", str]] = None
self._metadata = {}
# request queue: used to transfer message to storage orchestrator
self._request_queue: Optional[BaseQueue] = None
# response queue: used to receive status message from storage orchestrator
self._response_queue: Optional[BaseQueue] = None
def name(self) -> Optional[str]:
return self._name
def value(self) -> Optional[Any]:
"""The real object that this payload holds."""
return self._value
def hash(self) -> Optional[str]:
"""The hash of this Payload uniquely identifies the payload and the associated origin Work.
Returns ``None`` if the origin is not defined, i.e., this Path did not yet get attached to a LightningWork.
"""
if self._origin is None:
return None
contents = f"{self.origin_name}/{self.consumer_name}/{self.name}"
return hashlib.sha1(contents.encode("utf-8")).hexdigest()
def origin_name(self) -> str:
"""The name of the LightningWork where this payload was first created.
Attaching a Payload to a LightningWork will automatically make it the `origin`.
"""
from lightning.app.core.work import LightningWork
return self._origin.name if isinstance(self._origin, LightningWork) else self._origin
def consumer_name(self) -> str:
"""The name of the LightningWork where this payload is being accessed.
By default, this is the same as the :attr:`origin_name`.
"""
from lightning.app.core.work import LightningWork
return self._consumer.name if isinstance(self._consumer, LightningWork) else self._consumer
def _path(self) -> Optional[Path]:
"""Path to the file that the payload value gets serialized to."""
if not self._name:
return None
return Path("lit://", self._name)
def save(self, obj: Any, path: str) -> None:
"""Override this method with your own saving logic."""
def load(self, path: str) -> Any:
"""Override this method with your own loading logic."""
def _attach_work(self, work: "LightningWork") -> None:
"""Attach a LightningWork to this PayLoad.
Args:
work: LightningWork to be attached to this Payload.
"""
if self._origin is None:
# Can become an owner only if there is not already one
self._origin = work.name
self._consumer = work.name
def _attach_queues(self, request_queue: BaseQueue, response_queue: BaseQueue) -> None:
"""Attaches the queues for communication with the Storage Orchestrator."""
self._request_queue = request_queue
self._response_queue = response_queue
def _sanitize(self) -> None:
"""Sanitize this Payload so that it can be deep-copied."""
self._origin = self.origin_name
self._consumer = self.consumer_name
self._request_queue = None
self._response_queue = None
def exists_remote(self):
"""Check if the payload exists remotely on the attached orgin Work.
Raises:
RuntimeError: If the payload is not attached to any Work (origin undefined).
"""
# Fail early if we need to check the remote but an origin is not defined
if not self._origin or self._request_queue is None or self._response_queue is None:
raise RuntimeError(
f"Trying to check if the payload {self} exists, but the payload is not attached to a LightningWork."
f" Set it as an attribute to a LightningWork or pass it to the `run()` method."
)
# 1. Send message to orchestrator through queue that with a request for a path existence check
request = _ExistsRequest(source=self.origin_name, name=self._name, path=str(self._path), hash=self.hash)
self._request_queue.put(request)
# 2. Wait for the response to come back
response: _ExistsResponse = self._response_queue.get() # blocking
return response.exists
def get(self) -> Any:
if _is_flow_context():
raise RuntimeError("`Payload.get()` can only be called from within the `run()` method of LightningWork.")
if self._request_queue is None or self._response_queue is None:
raise RuntimeError(
f"Trying to get the file {self}, but the payload is not attached to a LightningApp."
f" Are you trying to get the file from within `__init__`?"
)
if self._origin is None:
raise RuntimeError(
f"Trying to get the file {self}, but the payload is not attached to a LightningWork. Set it as an"
f" attribute to a LightningWork or pass it to the `run()` method."
)
# 1. Send message to orchestrator through queue with details of the transfer
# the source is the name of the work that owns the file that we request
# the destination is determined by the queue, since each work has a dedicated send and recv queue
request = _GetRequest(source=self.origin_name, name=self._name, path=str(self._path), hash=self.hash)
self._request_queue.put(request)
# 2. Wait for the transfer to finish
response: _GetResponse = self._response_queue.get() # blocking
self._validate_get_response(response)
fs = _filesystem()
# 3. Wait until the file appears in shared storage
while not fs.exists(response.path) or fs.info(response.path)["size"] != response.size:
sleep(REMOTE_STORAGE_WAIT)
# 4. Copy the file from the shared storage to the destination on the local filesystem
local_path = self._path
_logger.debug(f"Attempting to copy {str(response.path)} -> {str(local_path)}")
fs.get(str(response.path), str(local_path), recursive=False)
# Ensure the file is properly written
sleep(0.5)
self._value = self.load(local_path)
return self._value
def _validate_get_response(self, response: "_GetResponse") -> None:
if response.source != self._origin or response.hash != self.hash:
raise RuntimeError(
f"Tried to get the file {self} but received a response for a request it did not send. The response"
f" contents are: {response}"
)
if response.exception is not None:
raise RuntimeError(
f"An exception was raised while trying to transfer the contents at {response.path}"
f" from Work {response.source} to {response.destination}. See the full stacktrace above."
) from response.exception
def to_dict(self) -> dict:
"""Serialize this Path to a dictionary."""
return {
"name": self.name,
"origin_name": self.origin_name,
"consumer_name": self.consumer_name,
"metadata": self._metadata,
}
def from_dict(cls, content: dict) -> "_BasePayload":
"""Instantiate a Payload from a dictionary."""
payload = cls(None)
payload._name = content["name"]
payload._origin = content["origin_name"]
payload._consumer = content["consumer_name"]
payload._metadata = content["metadata"]
return payload
def _handle_exists_request(work: "LightningWork", request: _ExistsRequest) -> _ExistsResponse:
return _ExistsResponse(
source=request.source,
path=request.path,
name=request.name,
destination=request.destination,
hash=request.hash,
exists=getattr(work, request.name, None) is not None,
)
def _handle_get_request(work: "LightningWork", request: _GetRequest) -> _GetResponse:
from lightning.app.storage.copier import _copy_files
source_path = pathlib.Path(request.path)
destination_path = _shared_storage_path() / request.hash
response = _GetResponse(
source=request.source,
name=request.name,
path=str(destination_path),
hash=request.hash,
destination=request.destination,
)
try:
payload = getattr(work, request.name)
payload.save(payload.value, source_path)
response.size = source_path.stat().st_size
_copy_files(source_path, destination_path)
_logger.debug(f"All files copied from {request.path} to {response.path}.")
except Exception as ex:
response.exception = ex
return response
The provided code snippet includes necessary dependencies for implementing the `_state_to_json` function. Write a Python function `def _state_to_json(state: Dict[str, Any]) -> Dict[str, Any]` to solve the following problem:
Utility function to make sure that state dict is json serializable.
Here is the function:
def _state_to_json(state: Dict[str, Any]) -> Dict[str, Any]:
"""Utility function to make sure that state dict is json serializable."""
from lightning.app.storage.path import Path
from lightning.app.storage.payload import _BasePayload
state_paths_cleaned = apply_to_collection(state, dtype=(Path, _BasePayload), function=lambda x: x.to_dict())
return apply_to_collection(state_paths_cleaned, dtype=type(NotPresent), function=lambda x: None) | Utility function to make sure that state dict is json serializable. |
155,617 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
COMPONENT_CONTEXT: Optional[ComponentContext] = None
class ComponentContext(enum.Enum):
def _set_work_context() -> None:
global COMPONENT_CONTEXT
COMPONENT_CONTEXT = ComponentContext.WORK | null |
155,618 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
COMPONENT_CONTEXT: Optional[ComponentContext] = None
class ComponentContext(enum.Enum):
"""Describes whether the current process is running LightningFlow or LightningWork."""
FLOW = "flow"
WORK = "work"
FRONTEND = "frontend"
def _is_flow_context() -> bool:
global COMPONENT_CONTEXT
return COMPONENT_CONTEXT == ComponentContext.FLOW | null |
155,619 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
COMPONENT_CONTEXT: Optional[ComponentContext] = None
class ComponentContext(enum.Enum):
"""Describes whether the current process is running LightningFlow or LightningWork."""
FLOW = "flow"
WORK = "work"
FRONTEND = "frontend"
def _is_work_context() -> bool:
global COMPONENT_CONTEXT
return COMPONENT_CONTEXT == ComponentContext.WORK | null |
155,620 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
COMPONENT_CONTEXT: Optional[ComponentContext] = None
class ComponentContext(enum.Enum):
"""Describes whether the current process is running LightningFlow or LightningWork."""
FLOW = "flow"
WORK = "work"
FRONTEND = "frontend"
def _is_frontend_context() -> bool:
global COMPONENT_CONTEXT
return COMPONENT_CONTEXT == ComponentContext.FRONTEND | null |
155,621 | import os
from contextlib import contextmanager
from typing import TYPE_CHECKING, Any, Dict, Generator, Optional
from deepdiff.helper import NotPresent
from lightning_utilities.core.apply_func import apply_to_collection
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.enum import ComponentContext
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
from lightning.app.utilities.tree import breadth_first
def is_overridden(method_name: str, instance: Optional[object] = None, parent: Optional[Type[object]] = None) -> bool:
if instance is None:
return False
if parent is None:
if isinstance(instance, lightning.app.LightningFlow):
parent = lightning.app.LightningFlow
elif isinstance(instance, lightning.app.LightningWork):
parent = lightning.app.LightningWork
if parent is None:
raise ValueError("Expected a parent")
from lightning_utilities.core.overrides import is_overridden
return is_overridden(method_name, instance, parent)
class LightningFlow:
_INTERNAL_STATE_VARS = {
# Internal protected variables that are still part of the state (even though they are prefixed with "_")
"_paths",
"_layout",
}
def __init__(self) -> None:
"""The LightningFlow is used by the :class:`~lightning.app.core.app.LightningApp` to coordinate and manage
long- running jobs contained, the :class:`~lightning.app.core.work.LightningWork`.
A LightningFlow is characterized by:
* A set of state variables.
* Long-running jobs (:class:`~lightning.app.core.work.LightningWork`).
* Its children ``LightningFlow`` or ``LightningWork`` with their state variables.
**State variables**
The LightningFlow are special classes whose attributes require to be
json-serializable (e.g., int, float, bool, list, dict, ...).
They also may not reach into global variables unless they are constant.
The attributes need to be all defined in `__init__` method,
and eventually assigned to different values throughout the lifetime of the object.
However, defining new attributes outside of `__init__` is not allowed.
Attributes taken together represent the state of the component.
Components are capable of retrieving their state and that of their
children recursively at any time. They are also capable of setting
an externally provided state recursively to its children.
**Execution model and work**
The entry point for execution is the ``run`` method at the root component.
The ``run`` method of the root component may call the ``run`` method of its children, and the children
may call the ``run`` methods of their children and so on.
The ``run`` method of the root component is called repeatedly in a while loop forever until the app gets
terminated. In this programming model (reminiscent of React, Vue or Streamlit from the JavaScript world),
the values of the state variables, or their changes, are translated into actions throughout the component
hierarchy. This means the flow of execution will only be affected by state changes in a component or one of
its children, and otherwise remain idempotent.
The actions themselves are self-contained within :class:`~lightning.app.core.work.LightningWork`.
The :class:`~lightning.app.core.work.LightningWork` are typically used for long-running jobs,
like downloading a dataset, performing a query, starting a computationally heavy script.
While one may access any state variable in a LightningWork from a LightningFlow, one may not
directly call methods of other components from within a LightningWork as LightningWork can't have any children.
This limitation allows applications to be distributed at scale.
**Component hierarchy and App**
Given the above characteristics, a root LightningFlow, potentially containing
children components, can be passed to an App object and its execution
can be distributed (each LightningWork will be run within its own process
or different arrangements).
Example:
>>> from lightning.app import LightningFlow
>>> class RootFlow(LightningFlow):
... def __init__(self):
... super().__init__()
... self.counter = 0
... def run(self):
... self.counter += 1
...
>>> flow = RootFlow()
>>> flow.run()
>>> assert flow.counter == 1
>>> assert flow.state["vars"]["counter"] == 1
"""
self._state: set = set()
self._name: str = ""
self._flows: set = set()
self._works: set = set()
self._structures: set = set()
self._calls: dict = {}
self._changes: dict = {}
self._layout: Union[List[Dict], Dict] = {}
self._paths: dict = {}
self._backend: Optional["Backend"] = None
# tuple instead of a list so that it cannot be modified without using the setter
self._lightningignore: Tuple[str, ...] = ()
def name(self) -> str:
"""Return the current LightningFlow name."""
return self._name or "root"
def __setattr__(self, name: str, value: Any) -> None:
attr = getattr(self.__class__, name, None)
if isinstance(attr, property) and attr.fset is not None:
return attr.fset(self, value)
from lightning.app.structures import Dict as ComponentDict
from lightning.app.structures import List as ComponentList
if (
not _is_init_context(self)
and name not in self._state
and name not in self._paths
and (
not isinstance(value, (LightningWork, LightningFlow))
or (isinstance(value, (LightningWork, LightningFlow)) and not _is_run_context(self))
)
and name not in self._works.union(self._flows)
and self._is_state_attribute(name)
):
raise AttributeError(f"Cannot set attributes that were not defined in __init__: {name}")
if isinstance(value, str) and value.startswith("lit://"):
value = Path(value)
if self._is_state_attribute(name):
if hasattr(self, name):
if name in self._flows and value != getattr(self, name):
raise AttributeError(f"Cannot set attributes as the flow can't be changed once defined: {name}")
if name in self._works and value != getattr(self, name):
raise AttributeError(f"Cannot set attributes as the work can't be changed once defined: {name}")
if isinstance(value, (list, dict)) and value:
_type = (LightningFlow, LightningWork, ComponentList, ComponentDict)
if isinstance(value, list) and all(isinstance(va, _type) for va in value):
value = ComponentList(*value)
if isinstance(value, dict) and all(isinstance(va, _type) for va in value.values()):
value = ComponentDict(**value)
if isinstance(value, LightningFlow):
self._flows.add(name)
_set_child_name(self, value, name)
if name in self._state:
self._state.remove(name)
# Attach the backend to the flow and its children work.
if self._backend:
LightningFlow._attach_backend(value, self._backend)
for work in value.works():
work._register_cloud_compute()
elif isinstance(value, LightningWork):
self._works.add(name)
_set_child_name(self, value, name)
if name in self._state:
self._state.remove(name)
if self._backend:
self._backend._wrap_run_method(_LightningAppRef().get_current(), value) # type: ignore[arg-type]
value._register_cloud_compute()
elif isinstance(value, (ComponentDict, ComponentList)):
self._structures.add(name)
_set_child_name(self, value, name)
_backend = getattr(self, "backend", None)
if _backend is not None:
value._backend = _backend
for flow in value.flows:
if _backend is not None:
LightningFlow._attach_backend(flow, _backend)
for work in value.works:
work._register_cloud_compute()
if _backend is not None:
_backend._wrap_run_method(_LightningAppRef().get_current(), work)
elif isinstance(value, Path):
# In the init context, the full name of the Flow and Work is not known, i.e., we can't serialize
# the path without losing the information of origin and consumer. Hence, we delay the serialization
# of the path object until the app is instantiated.
if not _is_init_context(self):
self._paths[name] = value.to_dict()
self._state.add(name)
elif isinstance(value, Drive):
value = deepcopy(value)
value.component_name = self.name
self._state.add(name)
elif isinstance(value, CloudCompute):
self._state.add(name)
elif _is_json_serializable(value):
self._state.add(name)
if not isinstance(value, Path) and hasattr(self, "_paths") and name in self._paths:
# The attribute changed type from Path to another
self._paths.pop(name)
else:
raise AttributeError(
f"Only JSON-serializable attributes are currently supported"
f" (str, int, float, bool, tuple, list, dict etc.) to be part of {self} state. "
f"Found the attribute {name} with {value} instead. \n"
"HINT: Private attributes defined as follows `self._x = y` won't be shared between components "
"and therefore don't need to be JSON-serializable."
)
super().__setattr__(name, value)
return None
def _attach_backend(flow: "LightningFlow", backend: "Backend") -> None:
"""Attach the backend to all flows and its children."""
flow._backend = backend
for name in flow._structures:
getattr(flow, name)._backend = backend
for child_flow in flow.flows.values():
child_flow._backend = backend
for name in child_flow._structures:
getattr(child_flow, name)._backend = backend
app = _LightningAppRef().get_current()
for child_work in flow.works():
child_work._backend = backend
backend._wrap_run_method(app, child_work) # type: ignore[arg-type]
def __getattr__(self, item: str) -> Any:
if item in self.__dict__.get("_paths", {}):
return Path.from_dict(self._paths[item])
return self.__getattribute__(item)
def ready(self) -> bool:
"""Override to customize when your App should be ready."""
flows = self.flows
return all(flow.ready for flow in flows.values()) if flows else True
def changes(self) -> dict:
return self._changes.copy()
def state(self) -> dict:
"""Returns the current flow state along its children."""
children_state = {child: getattr(self, child).state for child in self._flows}
works_state = {work: getattr(self, work).state for work in self._works}
return {
"vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
# this may have the challenge that ret cannot be pickled, we'll need to handle this
"calls": self._calls.copy(),
"flows": children_state,
"works": works_state,
"structures": {child: getattr(self, child).state for child in self._structures},
"changes": {},
}
def state_vars(self) -> dict:
children_state = {child: getattr(self, child).state_vars for child in self._flows}
works_state = {work: getattr(self, work).state_vars for work in self._works}
return {
"vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
"flows": children_state,
"works": works_state,
"structures": {child: getattr(self, child).state_vars for child in self._structures},
}
def state_with_changes(self) -> dict:
children_state = {child: getattr(self, child).state_with_changes for child in self._flows}
works_state = {work: getattr(self, work).state_with_changes for work in self._works}
return {
"vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
# this may have the challenge that ret cannot be pickled, we'll need to handle this
"calls": self._calls.copy(),
"flows": children_state,
"works": works_state,
"structures": {child: getattr(self, child).state_with_changes for child in self._structures},
"changes": self.changes,
}
def flows(self) -> Dict[str, "LightningFlow"]:
"""Return its children LightningFlow."""
flows = {}
for el in sorted(self._flows):
flow = getattr(self, el)
flows[flow.name] = flow
flows.update(flow.flows)
for struct_name in sorted(self._structures):
flows.update(getattr(self, struct_name).flows)
return flows
def lightningignore(self) -> Tuple[str, ...]:
"""Programmatic equivalent of the ``.lightningignore`` file."""
return self._lightningignore
def lightningignore(self, lightningignore: Tuple[str, ...]) -> None:
if self._backend is not None:
raise RuntimeError(
f"Your app has been already dispatched, so modifying the `{self.name}.lightningignore` does not have an"
" effect"
)
self._lightningignore = lightningignore
def works(self, recurse: bool = True) -> List[LightningWork]:
"""Return its :class:`~lightning.app.core.work.LightningWork`."""
works = [getattr(self, el) for el in sorted(self._works)]
if not recurse:
return works
for child_name in sorted(self._flows):
for w in getattr(self, child_name).works(recurse=recurse):
works.append(w)
for struct_name in sorted(self._structures):
for w in getattr(self, struct_name).works:
works.append(w)
return works
def named_works(self, recurse: bool = True) -> List[Tuple[str, LightningWork]]:
"""Return its :class:`~lightning.app.core.work.LightningWork` with their names."""
return [(w.name, w) for w in self.works(recurse=recurse)]
def set_state(self, provided_state: Dict, recurse: bool = True) -> None:
"""Method to set the state to this LightningFlow, its children and
:class:`~lightning.app.core.work.LightningWork`.
Arguments:
provided_state: The state to be reloaded
recurse: Whether to apply the state down children.
"""
for k, v in provided_state["vars"].items():
if isinstance(v, Dict):
v = _maybe_create_drive(self.name, v)
if isinstance(v, Dict):
v = _maybe_create_cloud_compute(v)
setattr(self, k, v)
self._changes = provided_state["changes"]
self._calls.update(provided_state["calls"])
if not recurse:
return
for child, state in provided_state["flows"].items():
getattr(self, child).set_state(state)
for work, state in provided_state["works"].items():
getattr(self, work).set_state(state)
for structure, state in provided_state["structures"].items():
getattr(self, structure).set_state(state)
def stop(self, end_msg: str = "") -> None:
"""Method used to exit the application."""
if end_msg:
print(end_msg)
raise ExitAppException
def fail(self, end_msg: str = "") -> None:
"""Method used to exit and fail the application."""
if end_msg:
print(end_msg)
raise LightningFlowException
def _exit(self, end_msg: str = "") -> None:
"""Used to exit the application.
Private method.
.. deprecated:: 1.9.0
This function is deprecated and will be removed in 2.0.0. Use :meth:`stop` instead.
"""
warnings.warn(
DeprecationWarning(
"This function is deprecated and will be removed in 2.0.0. Use `LightningFlow.stop` instead."
)
)
return self.stop(end_msg=end_msg)
def _is_state_attribute(name: str) -> bool:
"""Every public attribute is part of the state by default and all protected (prefixed by '_') or private
(prefixed by '__') attributes are not.
Exceptions are listed in the `_INTERNAL_STATE_VARS` class variable.
"""
return name in LightningFlow._INTERNAL_STATE_VARS or not name.startswith("_")
def run(self, *args: Any, **kwargs: Any) -> None:
"""Override with your own logic."""
pass
def schedule(
self, cron_pattern: str, start_time: Optional[datetime] = None, user_key: Optional[str] = None
) -> bool:
"""The schedule method is used to run a part of the flow logic on timely manner.
.. code-block:: python
from lightning.app import LightningFlow
class Flow(LightningFlow):
def run(self):
if self.schedule("hourly"):
print("run some code every hour")
Arguments:
cron_pattern: The cron pattern to provide. Learn more at https://crontab.guru/.
start_time: The start time of the cron job.
user_key: Optional key used to improve the caching mechanism.
A best practice is to avoid running a dynamic flow or work under the self.schedule method.
Instead, instantiate them within the condition, but run them outside.
.. code-block:: python
from lightning.app import LightningFlow
from lightning.app.structures import List
class SchedulerDAG(LightningFlow):
def __init__(self):
super().__init__()
self.dags = List()
def run(self):
if self.schedule("hourly"):
self.dags.append(DAG(...))
for dag in self.dags:
payload = dag.run()
**Learn more about Scheduling**
.. raw:: html
<div class="display-card-container">
<div class="row">
.. displayitem::
:header: Schedule your components
:description: Learn more scheduling.
:col_css: col-md-4
:button_link: ../../../glossary/scheduling.html
:height: 180
:tag: Basic
.. displayitem::
:header: Build your own DAG
:description: Learn more DAG scheduling with examples.
:col_css: col-md-4
:button_link: ../../../examples/app/dag/dag.html
:height: 180
:tag: Basic
.. raw:: html
</div>
</div>
<br />
"""
if not user_key:
frame = cast(FrameType, inspect.currentframe()).f_back
assert frame is not None
cache_key = f"{cron_pattern}.{frame.f_code.co_filename}.{frame.f_lineno}"
else:
cache_key = user_key
call_hash = f"{self.schedule.__name__}:{DeepHash(cache_key)[cache_key]}"
if "scheduling" not in self._calls:
self._calls["scheduling"] = {}
entered = call_hash in self._calls["scheduling"]
expr_aliases = {
"midnight": "@midnight",
"hourly": "@hourly",
"daily": "@daily",
"weekly": "@weekly",
"monthly": "@monthly",
"yearly": "@yearly",
"annually": "@annually",
}
if cron_pattern in expr_aliases:
cron_pattern = expr_aliases[cron_pattern]
if not entered:
if not start_time:
start_time = datetime.now()
schedule_metadata = {
"running": False,
"cron_pattern": cron_pattern,
"start_time": str(start_time.isoformat()),
"name": self.name,
}
self._calls["scheduling"][call_hash] = schedule_metadata
app = _LightningAppRef().get_current()
if app:
app._register_schedule(call_hash, schedule_metadata)
return True
return self._calls["scheduling"][call_hash]["running"]
def _enable_schedule(self, call_hash: str) -> None:
self._calls["scheduling"][call_hash]["running"] = True
def _disable_running_schedules(self) -> None:
if "scheduling" not in self._calls:
return
for call_hash in self._calls["scheduling"]:
self._calls["scheduling"][call_hash]["running"] = False
def configure_layout(self) -> Union[Dict[str, Any], List[Dict[str, Any]], Frontend]:
"""Configure the UI layout of this LightningFlow.
You can either
1. Return a single :class:`~lightning.app.frontend.frontend.Frontend` object to serve a user interface
for this Flow.
2. Return a single dictionary to expose the UI of a child flow.
3. Return a list of dictionaries to arrange the children of this flow in one or multiple tabs.
**Example:** Serve a static directory (with at least a file index.html inside).
.. code-block:: python
from lightning.app.frontend import StaticWebFrontend
class Flow(LightningFlow):
...
def configure_layout(self):
return StaticWebFrontend("path/to/folder/to/serve")
**Example:** Serve a streamlit UI (needs the streamlit package to be installed).
.. code-block:: python
from lightning.app.frontend import StaticWebFrontend
class Flow(LightningFlow):
...
def configure_layout(self):
return StreamlitFrontend(render_fn=my_streamlit_ui)
def my_streamlit_ui(state):
# add your streamlit code here!
import streamlit as st
**Example:** Arrange the UI of my children in tabs (default UI by Lightning).
.. code-block:: python
class Flow(LightningFlow):
def configure_layout(self):
return [
dict(name="First Tab", content=self.child0),
dict(name="Second Tab", content=self.child1),
dict(name="Lightning", content="https://lightning.ai"),
]
If you don't implement ``configure_layout``, Lightning will collect all children and display their UI in a tab
(if they have their own ``configure_layout`` implemented).
Note:
This hook gets called at the time of app creation and then again as part of the loop. If desired, the
returned layout configuration can depend on the state. The only exception are the flows that return a
:class:`~lightning.app.frontend.frontend.Frontend`. These need to be provided at the time of app creation
in order for the runtime to start the server.
**Learn more about adding UI**
.. raw:: html
<div class="display-card-container">
<div class="row">
.. displayitem::
:header: Add a web user interface (UI)
:description: Learn more how to integrate several UIs.
:col_css: col-md-4
:button_link: ../../../workflows/add_web_ui/index.html
:height: 180
:tag: Basic
.. raw:: html
</div>
</div>
<br />
"""
return [{"name": name, "content": component} for (name, component) in self.flows.items()]
def experimental_iterate(self, iterable: Iterable, run_once: bool = True, user_key: str = "") -> Generator:
"""This method should always be used with any kind of iterable to ensure its fault tolerant.
If you want your iterable to always be consumed from scratch, you shouldn't use this method.
Arguments:
iterable: Iterable to iterate over. The iterable shouldn't have side effects or be random.
run_once: Whether to run the entire iteration only once.
Otherwise, it would restart from the beginning.
user_key: Key to be used to track the caching mechanism.
"""
if not isinstance(iterable, Iterable):
raise TypeError(f"An iterable should be provided to `self.iterate` method. Found {iterable}")
# TODO: Find a better way. Investigated using __reduce__, but state change invalidate the cache.
if not user_key:
frame = cast(FrameType, inspect.currentframe()).f_back
assert frame is not None
cache_key = f"{frame.f_code.co_filename}.{frame.f_code.co_firstlineno}"
else:
cache_key = user_key
call_hash = f"{self.experimental_iterate.__name__}:{DeepHash(cache_key)[cache_key]}"
entered = call_hash in self._calls
has_started = entered and self._calls[call_hash]["counter"] > 0
has_finished = entered and self._calls[call_hash]["has_finished"]
if has_finished:
if not run_once:
self._calls[call_hash].update({"counter": 0, "has_finished": False})
else:
return range(0)
if not has_started:
self._calls[call_hash] = {
"name": self.experimental_iterate.__name__,
"call_hash": call_hash,
"counter": 0,
"has_finished": False,
}
skip_counter = max(self._calls[call_hash]["counter"], 0)
for counter, value in enumerate(iterable):
if skip_counter:
skip_counter -= 1
continue
self._calls[call_hash].update({"counter": counter})
yield value
self._calls[call_hash].update({"has_finished": True})
def configure_commands(self) -> None:
"""Configure the commands of this LightningFlow.
Returns a list of dictionaries mapping a command name to a flow method.
.. code-block:: python
class Flow(LightningFlow):
def __init__(self):
super().__init__()
self.names = []
def configure_commands(self):
return {"my_command_name": self.my_remote_method}
def my_remote_method(self, name):
self.names.append(name)
Once the app is running with the following command:
.. code-block:: bash
lightning_app run app app.py
.. code-block:: bash
lightning_app my_command_name --args name=my_own_name
"""
raise NotImplementedError
def configure_api(self) -> None:
"""Configure the API routes of the LightningFlow.
Returns a list of HttpMethod such as Post or Get.
.. code-block:: python
from lightning.app import LightningFlow
from lightning.app.api import Post
from pydantic import BaseModel
class HandlerModel(BaseModel):
name: str
class Flow(LightningFlow):
def __init__(self):
super().__init__()
self.names = []
def handler(self, config: HandlerModel) -> None:
self.names.append(config.name)
def configure_api(self):
return [Post("/v1/api/request", self.handler)]
Once the app is running, you can access the Swagger UI of the app
under the ``/docs`` route.
"""
raise NotImplementedError
def state_dict(self) -> dict:
"""Returns the current flow state but not its children."""
return {
"vars": _sanitize_state({el: getattr(self, el) for el in self._state}),
"calls": self._calls.copy(),
"changes": {},
"flows": {},
"works": {},
"structures": {},
}
def load_state_dict(
self,
flow_state: Dict[str, Any],
children_states: Dict[str, Any],
strict: bool = True,
) -> None:
"""Reloads the state of this flow and its children.
.. code-block:: python
class Work(LightningWork):
def __init__(self):
super().__init__()
self.counter = 0
def run(self):
self.counter += 1
class Flow(LightningFlow):
def run(self):
# dynamically create a work.
if not getattr(self, "w", None):
self.w = WorkReload()
self.w.run()
def load_state_dict(self, flow_state, children_states, strict) -> None:
# 1: Re-instantiate the dynamic work
self.w = Work()
# 2: Make any states modification / migration.
...
# 3: Call the parent ``load_state_dict`` to
# recursively reload the states.
super().load_state_dict(
flow_state,
children_states,
strict,
)
Arguments:
flow_state: The state of the current flow.
children_states: The state of the dynamic children of this flow.
strict: Whether to raise an exception if a dynamic
children hasn't been re-created.
"""
self.set_state(flow_state, recurse=False)
direct_children_states = {k: v for k, v in children_states.items() if "." not in k}
for child_name, state in direct_children_states.items():
child = getattr(self, child_name, None)
if isinstance(child, LightningFlow):
lower_children_states = {
k.replace(child_name + ".", ""): v
for k, v in children_states.items()
if k.startswith(child_name) and k != child_name
}
child.load_state_dict(state, lower_children_states, strict=strict)
elif isinstance(child, LightningWork):
child.set_state(state)
elif strict:
raise ValueError(f"The component {child_name} wasn't instantiated for the component {self.name}")
def _validate_root_flow(flow: "LightningFlow") -> None:
from lightning.app.core.flow import LightningFlow
if not is_overridden("run", instance=flow, parent=LightningFlow):
raise TypeError(
"The root flow passed to `LightningApp` does not override the `run()` method. This is required. Please"
f" implement `run()` in your `{flow.__class__.__name__}` class."
) | null |
155,622 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def requires(module_paths: Union[str, List]):
if not isinstance(module_paths, list):
module_paths = [module_paths]
def decorator(func):
def wrapper(*args: Any, **kwargs: Any):
unavailable_modules = [f"'{module}'" for module in module_paths if not module_available(module)]
if any(unavailable_modules):
is_lit_testing = bool(int(os.getenv("LIGHTING_TESTING", "0")))
msg = f"Required dependencies not available. Please run: pip install {' '.join(unavailable_modules)}"
if is_lit_testing:
warnings.warn(msg)
else:
raise ModuleNotFoundError(msg)
return func(*args, **kwargs)
return wrapper
return decorator
__package_name__ = "lightning.app".split(".")[0]
if __package_name__ == "lightning":
_PACKAGE_ROOT = os.path.dirname(_PACKAGE_ROOT)
_PROJECT_ROOT = os.path.dirname(_PROJECT_ROOT)
The provided code snippet includes necessary dependencies for implementing the `_get_extras` function. Write a Python function `def _get_extras(extras: str) -> str` to solve the following problem:
Get the given extras as a space delimited string. Used by the platform to install cloud extras in the cloud.
Here is the function:
def _get_extras(extras: str) -> str:
"""Get the given extras as a space delimited string.
Used by the platform to install cloud extras in the cloud.
"""
from lightning.app import __package_name__
requirements = {r: Requirement(r) for r in metadata.requires(__package_name__)}
marker = Marker(f'extra == "{extras}"')
requirements = [r for r, req in requirements.items() if str(req.marker) == str(marker)]
if requirements:
requirements = [f"'{r.split(';')[0].strip()}'" for r in requirements]
return " ".join(requirements)
return "" | Get the given extras as a space delimited string. Used by the platform to install cloud extras in the cloud. |
155,623 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_torch_available() -> bool:
return module_available("torch") | null |
155,624 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_pytorch_lightning_available() -> bool:
return module_available("lightning.pytorch") | null |
155,625 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_torchvision_available() -> bool:
return module_available("torchvision") | null |
155,626 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_json_argparse_available() -> bool:
return module_available("jsonargparse") | null |
155,627 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_streamlit_available() -> bool:
return module_available("streamlit") | null |
155,628 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_param_available() -> bool:
return module_available("param") | null |
155,629 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_streamlit_tensorboard_available() -> bool:
return module_available("streamlit_tensorboard") | null |
155,630 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_gradio_available() -> bool:
return module_available("gradio") | null |
155,631 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_lightning_flash_available() -> bool:
return module_available("flash") | null |
155,632 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_pil_available() -> bool:
return module_available("PIL") | null |
155,633 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_numpy_available() -> bool:
return module_available("numpy") | null |
155,634 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_docker_available() -> bool:
return module_available("docker") | null |
155,635 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_jinja2_available() -> bool:
return module_available("jinja2") | null |
155,636 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_playwright_available() -> bool:
return module_available("playwright") | null |
155,637 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_s3fs_available() -> bool:
return module_available("s3fs") | null |
155,638 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_sqlmodel_available() -> bool:
return module_available("sqlmodel") | null |
155,639 | import functools
import os
import platform
import sys
import warnings
from typing import Any, List, Union
from lightning_utilities.core.imports import module_available
from packaging.requirements import Marker, Requirement
def _is_aiohttp_available() -> bool:
return module_available("aiohttp") | null |
155,640 | from typing import Dict, Iterable
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.network import LightningClient
def _get_project(client: LightningClient, project_id: Optional[str] = None, verbose: bool = True) -> V1Membership:
"""Get a project membership for the user from the backend."""
if project_id is None:
project_id = LIGHTNING_CLOUD_PROJECT_ID
if project_id is not None:
project = client.projects_service_get_project(project_id)
if not project:
raise ValueError(
"Environment variable `LIGHTNING_CLOUD_PROJECT_ID` is set but could not find an associated project."
)
return V1Membership(
name=project.name,
display_name=project.display_name,
description=project.description,
created_at=project.created_at,
project_id=project.id,
owner_id=project.owner_id,
owner_type=project.owner_type,
quotas=project.quotas,
updated_at=project.updated_at,
)
projects = client.projects_service_list_memberships()
if len(projects.memberships) == 0:
raise ValueError("No valid projects found. Please reach out to lightning.ai team to create a project")
if len(projects.memberships) > 1 and verbose:
print(f"Defaulting to the project: {projects.memberships[0].name}")
return projects.memberships[0]
The provided code snippet includes necessary dependencies for implementing the `_names_to_ids` function. Write a Python function `def _names_to_ids(secret_names: Iterable[str]) -> Dict[str, str]` to solve the following problem:
Returns the name/ID pair for each given Secret name. Raises a `ValueError` if any of the given Secret names do not exist.
Here is the function:
def _names_to_ids(secret_names: Iterable[str]) -> Dict[str, str]:
"""Returns the name/ID pair for each given Secret name.
Raises a `ValueError` if any of the given Secret names do not exist.
"""
lightning_client = LightningClient()
project = _get_project(lightning_client)
secrets = lightning_client.secret_service_list_secrets(project_id=project.project_id)
secret_names_to_ids: Dict[str, str] = {}
for secret in secrets.secrets:
if secret.name in secret_names:
secret_names_to_ids[secret.name] = secret.id
for secret_name in secret_names:
if secret_name not in secret_names_to_ids:
raise ValueError(f"Secret with name '{secret_name}' not found")
return secret_names_to_ids | Returns the name/ID pair for each given Secret name. Raises a `ValueError` if any of the given Secret names do not exist. |
155,641 | from typing import Dict
from lightning_cloud.openapi import ApiClient, AuthServiceApi, V1LoginRequest
from lightning.app.utilities.login import Auth
The provided code snippet includes necessary dependencies for implementing the `_credential_string_to_basic_auth_params` function. Write a Python function `def _credential_string_to_basic_auth_params(credential_string: str) -> Dict[str, str]` to solve the following problem:
Returns the name/ID pair for each given Secret name. Raises a `ValueError` if any of the given Secret names do not exist.
Here is the function:
def _credential_string_to_basic_auth_params(credential_string: str) -> Dict[str, str]:
"""Returns the name/ID pair for each given Secret name.
Raises a `ValueError` if any of the given Secret names do not exist.
"""
if credential_string.count(":") != 1:
raise ValueError(
"Credential string must follow the format username:password; "
+ f"the provided one ('{credential_string}') does not."
)
username, password = credential_string.split(":")
if not username:
raise ValueError("Username cannot be empty.")
if not password:
raise ValueError("Password cannot be empty.")
return {"username": username, "password": password} | Returns the name/ID pair for each given Secret name. Raises a `ValueError` if any of the given Secret names do not exist. |
155,642 | import ast
import inspect
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Type, Union
def _is_method_context(component: Union["LightningFlow", "LightningWork"], selected_caller_name: str) -> bool:
"""Checks whether the call to a component originates from within the context of the component's ``__init__``
method."""
frame = inspect.currentframe().f_back
while frame is not None:
caller_name = frame.f_code.co_name
caller_self = frame.f_locals.get("self")
if caller_name == selected_caller_name and caller_self is component:
# the call originates from a frame under component.__init__
return True
frame = frame.f_back
return False
The provided code snippet includes necessary dependencies for implementing the `_is_init_context` function. Write a Python function `def _is_init_context(component: Union["LightningFlow", "LightningWork"]) -> bool` to solve the following problem:
Checks whether the call to a component originates from within the context of the component's ``__init__`` method.
Here is the function:
def _is_init_context(component: Union["LightningFlow", "LightningWork"]) -> bool:
"""Checks whether the call to a component originates from within the context of the component's ``__init__``
method."""
return _is_method_context(component, "__init__") | Checks whether the call to a component originates from within the context of the component's ``__init__`` method. |
155,643 | import ast
import inspect
from pathlib import Path
from typing import TYPE_CHECKING, Any, Dict, List, Optional, Set, Type, Union
def _is_method_context(component: Union["LightningFlow", "LightningWork"], selected_caller_name: str) -> bool:
"""Checks whether the call to a component originates from within the context of the component's ``__init__``
method."""
frame = inspect.currentframe().f_back
while frame is not None:
caller_name = frame.f_code.co_name
caller_self = frame.f_locals.get("self")
if caller_name == selected_caller_name and caller_self is component:
# the call originates from a frame under component.__init__
return True
frame = frame.f_back
return False
The provided code snippet includes necessary dependencies for implementing the `_is_run_context` function. Write a Python function `def _is_run_context(component: Union["LightningFlow", "LightningWork"]) -> bool` to solve the following problem:
Checks whether the call to a component originates from within the context of the component's ``run`` method.
Here is the function:
def _is_run_context(component: Union["LightningFlow", "LightningWork"]) -> bool:
"""Checks whether the call to a component originates from within the context of the component's ``run`` method."""
return _is_method_context(component, "run") or _is_method_context(component, "load_state_dict") | Checks whether the call to a component originates from within the context of the component's ``run`` method. |
155,644 | from dataclasses import dataclass
from typing import List, Optional
from bs4 import BeautifulSoup
class AppInfo:
title: Optional[str] = None
favicon: Optional[str] = None
description: Optional[str] = None
image: Optional[str] = None
# ensure the meta tags are correct or the UI might fail to load.
meta_tags: Optional[List[str]] = None
on_connect_end: Optional[str] = None
def _get_updated_content(original: str, root_path: str, info: AppInfo) -> str:
soup = BeautifulSoup(original, "html.parser")
# replace favicon
if info.favicon:
soup.find("link", {"rel": "icon"}).attrs["href"] = info.favicon
if info.title is not None:
soup.find("title").string = info.title
if info.description:
soup.find("meta", {"name": "description"}).attrs["content"] = info.description
if info.image:
soup.find("meta", {"property": "og:image"}).attrs["content"] = info.image
if info.meta_tags:
for meta in info.meta_tags:
soup.find("head").append(BeautifulSoup(meta, "html.parser"))
if root_path:
# this will be used by lightning app ui to add root_path to add requests
soup.find("head").append(BeautifulSoup(f'<script>window.app_prefix="{root_path}"</script>', "html.parser"))
return str(soup).replace("/static", f"{root_path}/static")
def update_index_file(ui_root: str, info: Optional[AppInfo] = None, root_path: str = "") -> None:
import shutil
from pathlib import Path
entry_file = Path(ui_root) / "index.html"
original_file = Path(ui_root) / "index.original.html"
if not original_file.exists():
shutil.copyfile(entry_file, original_file) # keep backup
else:
# revert index.html in case it was modified after creating original.html
shutil.copyfile(original_file, entry_file)
if info:
with original_file.open() as f:
original = f.read()
with entry_file.open("w") as f:
f.write(_get_updated_content(original=original, root_path=root_path, info=info))
if root_path:
root_path_without_slash = root_path.replace("/", "", 1) if root_path.startswith("/") else root_path
src_dir = Path(ui_root)
dst_dir = src_dir / root_path_without_slash
if dst_dir.exists():
shutil.rmtree(dst_dir, ignore_errors=True)
# copy everything except the current root_path, this is to fix a bug if user specifies
# /abc at first and then /abc/def, server don't start
# ideally we should copy everything except custom root_path that user passed.
shutil.copytree(src_dir, dst_dir, ignore=shutil.ignore_patterns(f"{root_path_without_slash}*")) | null |
155,645 | from dataclasses import asdict, dataclass
from typing import Dict, List, Optional, Tuple, Union
from uuid import uuid4
from lightning.app.core.constants import ENABLE_MULTIPLE_WORKS_IN_NON_DEFAULT_CONTAINER, enable_interruptible_works
from lightning.app.storage.mount import Mount
class Mount:
"""Allows you to mount the contents of an AWS S3 bucket on disk when running an app on the cloud.
Arguments:
source: The location which contains the external data which should be mounted in the
running work. At the moment, only AWS S3 mounts are supported. This must be a full
`s3` style identifier pointing to a bucket and (optionally) prefix to mount. For
example: `s3://foo/bar/`.
mount_path: An absolute directory path in the work where external data source should
be mounted as a filesystem. This path should not already exist in your codebase.
If not included, then the root_dir will be set to `/data/<last folder name in the bucket>`
"""
source: str = ""
mount_path: str = ""
def __post_init__(self) -> None:
for protocol in __MOUNT_PROTOCOLS__:
if self.source.startswith(protocol):
protocol = protocol
break
else: # N.B. for-else loop
raise ValueError(
f"Unknown protocol for the mount 'source' argument '{self.source}`. The 'source' "
f"string must start with one of the following prefixes: {__MOUNT_PROTOCOLS__}"
)
if protocol == "s3://" and not self.source.endswith("/"):
raise ValueError(
"S3 mounts must end in a trailing slash (`/`) to indicate a folder is being mounted. "
f"Received: '{self.source}'. Mounting a single file is not currently supported."
)
if self.mount_path == "":
self.mount_path = f"/data/{Path(self.source).stem}"
if not os.path.isabs(self.mount_path):
raise ValueError(
f"mount_path argument must be an absolute path to a "
f"location; received relative path {self.mount_path}"
)
def protocol(self) -> str:
"""The backing storage protocol indicated by this drive source."""
for protocol in __MOUNT_PROTOCOLS__:
if self.source.startswith(protocol):
return protocol
return ""
def _verify_mount_root_dirs_are_unique(mounts: Union[None, Mount, List[Mount], Tuple[Mount]]) -> None:
if isinstance(mounts, (list, tuple, set)):
mount_paths = [mount.mount_path for mount in mounts]
if len(set(mount_paths)) != len(mount_paths):
raise ValueError("Every Mount attached to a work must have a unique 'mount_path' argument.") | null |
155,646 | from dataclasses import asdict, dataclass
from typing import Dict, List, Optional, Tuple, Union
from uuid import uuid4
from lightning.app.core.constants import ENABLE_MULTIPLE_WORKS_IN_NON_DEFAULT_CONTAINER, enable_interruptible_works
from lightning.app.storage.mount import Mount
__CLOUD_COMPUTE_IDENTIFIER__ = "__cloud_compute__"
class CloudCompute:
"""Configure the cloud runtime for a lightning work or flow.
Arguments:
name: The name of the hardware to use. A full list of supported options can be found in
:doc:`/core_api/lightning_work/compute`. If you have a request for more hardware options, please contact
`onprem@lightning.ai <mailto:onprem@lightning.ai>`_.
disk_size: The disk size in Gigabytes.
The value you set here will be allocated to the /home folder.
idle_timeout: The number of seconds to wait before pausing the compute when the work is running and idle.
This timeout starts whenever your run() method succeeds (or fails).
If the timeout is reached, the instance pauses until the next run() call happens.
shm_size: Shared memory size in MiB, backed by RAM. min 512, max 8192, it will auto update in steps of 512.
For example 1100 will become 1024. If set to zero (the default) will get the default 64MiB inside docker.
mounts: External data sources which should be mounted into a work as a filesystem at runtime.
colocation_group_id: Identifier for groups of works to be colocated in the same datacenter.
Set this to a string of max. 64 characters and all works with this group id will run in the same datacenter.
If not set, the works are not guaranteed to be colocated.
interruptible: Whether to run on a interruptible machine e.g the machine can be stopped
at any time by the providers. This is also known as spot or preemptible machines.
Compared to on-demand machines, they tend to be cheaper.
"""
name: str = "default"
disk_size: int = 0
idle_timeout: Optional[int] = None
shm_size: Optional[int] = None
mounts: Optional[Union[Mount, List[Mount]]] = None
colocation_group_id: Optional[str] = None
interruptible: bool = False
_internal_id: Optional[str] = None
def __post_init__(self) -> None:
_verify_mount_root_dirs_are_unique(self.mounts)
self.name = self.name.lower()
if self.shm_size is None:
if "gpu" in self.name:
self.shm_size = 1024
else:
self.shm_size = 0
if self.interruptible:
if not enable_interruptible_works():
raise ValueError("CloudCompute with `interruptible=True` isn't supported yet.")
if "gpu" not in self.name:
raise ValueError("CloudCompute `interruptible=True` is supported only with GPU.")
# FIXME: Clean the mess on the platform side
if self.name == "default" or self.name == "cpu":
self.name = "cpu-small"
self._internal_id = "default"
# TODO: Remove from the platform first.
self.preemptible = self.interruptible
# All `default` CloudCompute are identified in the same way.
if self._internal_id is None:
self._internal_id = self._generate_id()
if self.colocation_group_id is not None and (
not isinstance(self.colocation_group_id, str)
or (isinstance(self.colocation_group_id, str) and len(self.colocation_group_id) > 64)
):
raise ValueError("colocation_group_id can only be a string of maximum 64 characters.")
def to_dict(self) -> dict:
"""Serialize this compute config to a plain dict tagged with the compute identifier."""
_verify_mount_root_dirs_are_unique(self.mounts)
# The "type" tag is what _maybe_create_cloud_compute uses to recognize serialized configs.
return {"type": __CLOUD_COMPUTE_IDENTIFIER__, **asdict(self)}
def from_dict(cls, d: dict) -> "CloudCompute":
    """Rebuild a ``CloudCompute`` from a dict produced by :meth:`to_dict`.

    Args:
        d: Serialized state; must carry the ``"type"`` identifier tag. The
            input dict is not modified.

    Returns:
        A new ``CloudCompute`` instance.

    Raises:
        TypeError: If the ``mounts`` entry is neither ``None``, a mount dict,
            nor a list of mount dicts.
    """
    # Work on a shallow copy so the pops below do not mutate the caller's dict
    # (the original implementation consumed "type"/"mounts" in place).
    d = dict(d)
    assert d.pop("type") == __CLOUD_COMPUTE_IDENTIFIER__
    mounts = d.pop("mounts", None)
    if mounts is None:
        pass
    elif isinstance(mounts, dict):
        d["mounts"] = Mount(**mounts)
    elif isinstance(mounts, (list)):
        d["mounts"] = [Mount(**mount) for mount in mounts]
    else:
        raise TypeError(
            f"mounts argument must be one of [None, Mount, List[Mount]], "
            f"received {mounts} of type {type(mounts)}"
        )
    _verify_mount_root_dirs_are_unique(d.get("mounts"))
    return cls(**d)
def id(self) -> Optional[str]:
"""Stable internal identifier of this compute ("default" for the shared default preset)."""
return self._internal_id
def is_default(self) -> bool:
    """Whether this config is one of the default CPU presets."""
    return self.name in {"default", "cpu-small"}
def _generate_id(self):
    """Return the shared id for the default preset, otherwise a fresh 7-char hex token."""
    if self.name == "default":
        return "default"
    return uuid4().hex[:7]
def clone(self):
    """Return a copy of this compute carrying a newly generated internal id."""
    payload = self.to_dict()
    payload["_internal_id"] = self._generate_id()
    return self.from_dict(payload)
def _maybe_create_cloud_compute(state: Dict) -> Union[CloudCompute, Dict]:
    """Reconstruct a ``CloudCompute`` from serialized state, or pass the value through.

    Args:
        state: A dict previously produced by ``CloudCompute.to_dict`` (tagged
            with the compute identifier) or any other value.

    Returns:
        A ``CloudCompute`` when the tag matches, otherwise ``state`` unchanged.
    """
    is_serialized_compute = bool(state) and state.get("type") == __CLOUD_COMPUTE_IDENTIFIER__
    return CloudCompute.from_dict(state) if is_serialized_compute else state
155,647 | import pathlib
from dataclasses import asdict, dataclass, field
from typing import Union
import yaml
from lightning.app.utilities.name_generator import get_unique_name
_APP_CONFIG_FILENAME = ".lightning"
The provided code snippet includes necessary dependencies for implementing the `_get_config_file` function. Write a Python function `def _get_config_file(source_path: Union[str, pathlib.Path]) -> pathlib.Path` to solve the following problem:
Get the Lightning app config file '.lightning' at the given source path. Args: source_path: A path to a folder or a file.
Here is the function:
def _get_config_file(source_path: Union[str, pathlib.Path]) -> pathlib.Path:
    """Locate the Lightning app config file '.lightning' for a source location.

    Args:
        source_path: Path to a folder or a file; a file is resolved to its
            parent folder first.

    Returns:
        The (possibly non-existing) path of the config file in that folder.
    """
    folder = pathlib.Path(source_path).absolute()
    if folder.is_file():
        folder = folder.parent
    return folder / _APP_CONFIG_FILENAME
155,648 | import os
import shutil
import tarfile
import tarfile
The provided code snippet includes necessary dependencies for implementing the `clean_tarfile` function. Write a Python function `def clean_tarfile(file_path: str, mode: str) -> None` to solve the following problem:
This utility removes all files extracted from a tarfile.
Here is the function:
def clean_tarfile(file_path: str, mode: str) -> None:
    """Remove every file previously extracted from a tarfile, then the tarfile itself.

    Args:
        file_path: Path of the tar archive; a no-op when it does not exist.
        mode: Mode passed to ``tarfile.open`` (e.g. ``"r:gz"``).
    """
    if not os.path.exists(file_path):
        return
    with tarfile.open(file_path, mode=mode) as tar_ref:
        for member in tar_ref.getmembers():
            target = member.path
            # Skip the archive root entry and anything already gone.
            if target == "." or not os.path.exists(target):
                continue
            try:
                remover = os.remove if os.path.isfile(target) else shutil.rmtree
                remover(target)
            except (FileNotFoundError, OSError, PermissionError):
                # Best-effort cleanup: ignore files we cannot delete.
                pass
    if os.path.exists(file_path):
        os.remove(file_path)
155,649 | import os
import shutil
import tarfile
import tarfile
The provided code snippet includes necessary dependencies for implementing the `extract_tarfile` function. Write a Python function `def extract_tarfile(file_path: str, extract_path: str, mode: str) -> None` to solve the following problem:
This utility extracts all files from a tarfile.
Here is the function:
def extract_tarfile(file_path: str, extract_path: str, mode: str) -> None:
    """Extract all members of a tarfile into ``extract_path``.

    Args:
        file_path: Path of the tar archive; a no-op when it does not exist.
        extract_path: Destination directory for the extracted members.
        mode: Mode passed to ``tarfile.open`` (e.g. ``"r:gz"``).

    Raises:
        PermissionError: If a member cannot be extracted; the original error
            is kept as the cause.
    """
    if not os.path.exists(file_path):
        return
    with tarfile.open(file_path, mode=mode) as tar_ref:
        for member in tar_ref.getmembers():
            try:
                # set_attrs=False: do not restore owner/mode/mtime from the archive.
                tar_ref.extract(member, path=extract_path, set_attrs=False)
            except PermissionError as err:
                # Chain the original exception instead of discarding it.
                raise PermissionError(f"Could not extract tar file {file_path}") from err
155,650 | import inspect
import os
import re
from dataclasses import asdict, dataclass, field
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Optional, Union
from typing_extensions import Self
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
The provided code snippet includes necessary dependencies for implementing the `load_requirements` function. Write a Python function `def load_requirements( path_dir: str, file_name: str = "base.txt", comment_char: str = "#", unfreeze: bool = True ) -> List[str]` to solve the following problem:
Load requirements from a file.
Here is the function:
def load_requirements(
    path_dir: str, file_name: str = "base.txt", comment_char: str = "#", unfreeze: bool = True
) -> List[str]:
    """Parse a pip requirements file into a list of requirement strings.

    Args:
        path_dir: Directory containing the requirements file.
        file_name: Name of the requirements file inside ``path_dir``.
        comment_char: Character that starts an inline comment.
        unfreeze: When True, drop non-strict upper-bound pins (``<``/``<=``)
            unless the inline comment contains ``strict``.

    Returns:
        The cleaned requirement strings; ``[]`` when the file does not exist.
    """
    path = os.path.join(path_dir, file_name)
    if not os.path.isfile(path):
        return []
    with open(path) as fp:
        stripped_lines = [raw.strip() for raw in fp.readlines()]
    requirements = []
    for line in stripped_lines:
        # Split off any inline comment; it may carry the "strict" marker.
        comment = ""
        if comment_char in line:
            cut = line.index(comment_char)
            comment = line[cut:]
            line = line[:cut]
        requirement = line.strip()
        # Skip blanks and directly-installed (URL) dependencies.
        if not requirement or requirement.startswith("http") or "@http" in requirement:
            continue
        # Relax upper bounds unless explicitly marked strict.
        if unfreeze and "<" in requirement and "strict" not in comment:
            requirement = re.sub(r",? *<=? *[\d\.\*]+", "", requirement).strip()
        requirements.append(requirement)
    return requirements
155,651 | import inspect
import os
import re
from dataclasses import asdict, dataclass, field
from pathlib import Path
from typing import TYPE_CHECKING, Dict, List, Optional, Union
from typing_extensions import Self
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.packaging.cloud_compute import CloudCompute
logger = Logger(__name__)
def _get_work_file(work: "LightningWork") -> Optional[str]:
    """Return the path of the file defining the work's class, or None if unresolvable."""
    cls = work.__class__
    try:
        source_file = inspect.getfile(cls)
    except TypeError:
        # inspect.getfile raises TypeError for built-in / dynamically created classes.
        logger.debug(f"The {cls.__name__} file couldn't be found.")
        return None
    return source_file
155,652 | import functools
import logging
import os
import pathlib
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.request
from pathlib import Path
from typing import Any, Callable, Optional
from packaging.version import Version
from lightning.app import _PROJECT_ROOT, _logger, _root_logger
from lightning.app import __version__ as version
from lightning.app.core.constants import FRONTEND_DIR, PACKAGE_LIGHTNING
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.git import check_github_repository, get_dir_name
def download_frontend(root: str = _PROJECT_ROOT):
"""Downloads an archive file for a specific release of the Lightning frontend and extracts it to the correct
directory."""
# NOTE(review): `root` is accepted but never used below — confirm it is intentional.
build_dir = "build"
frontend_dir = pathlib.Path(FRONTEND_DIR)
download_dir = tempfile.mkdtemp()
# Start from a clean slate: drop any previously downloaded frontend build.
shutil.rmtree(frontend_dir, ignore_errors=True)
response = urllib.request.urlopen(LIGHTNING_FRONTEND_RELEASE_URL) # noqa: S310
# Stream-extract the release tarball into a temp dir, then move its "build"
# folder into place as the frontend directory.
file = tarfile.open(fileobj=response, mode="r|gz")
file.extractall(path=download_dir) # noqa: S202
shutil.move(os.path.join(download_dir, build_dir), frontend_dir)
print("The Lightning UI has successfully been downloaded!")
def _cleanup(*tar_files: str):
    """Remove the given packaged tarballs and the project's ``dist`` build folder.

    Args:
        tar_files: Paths of the tarballs that were copied next to the app.
    """
    if tar_files:
        # The dist folder is shared by every tarball; the original deleted it
        # once per loop iteration — removing it a single time is equivalent.
        shutil.rmtree(os.path.join(_PROJECT_ROOT, "dist"), ignore_errors=True)
    for tar_file in tar_files:
        os.remove(tar_file)
def _prepare_wheel(path):
"""Build a source distribution for the project at ``path``, logging to a temp file."""
# NOTE(review): shells out to the Unix `rm` binary — POSIX-only; confirm Windows
# is out of scope here.
with open("log.txt", "w") as logfile:
# Remove any stale dist folder so _copy_tar later sees exactly one tarball.
with subprocess.Popen(
["rm", "-r", "dist"], stdout=logfile, stderr=logfile, bufsize=0, close_fds=True, cwd=path
) as proc:
proc.wait()
# Build the sdist; output lands in <path>/dist.
with subprocess.Popen(
["python", "setup.py", "sdist"],
stdout=logfile,
stderr=logfile,
bufsize=0,
close_fds=True,
cwd=path,
) as proc:
proc.wait()
# The build log is only needed while the subprocesses run.
os.remove("log.txt")
def _copy_tar(project_root, dest: Path) -> str:
    """Copy the single sdist tarball from ``project_root/dist`` into ``dest``.

    Args:
        project_root: Project folder containing a ``dist`` directory with
            exactly one built tarball.
        dest: Destination directory for the copy.

    Returns:
        The tarball's file name.
    """
    dist_dir = os.path.join(project_root, "dist")
    entries = os.listdir(dist_dir)
    # _prepare_wheel guarantees a clean dist folder with one artifact.
    assert len(entries) == 1
    tar_name = entries[0]
    shutil.copy(os.path.join(dist_dir, tar_name), dest)
    return tar_name
def get_dist_path_if_editable_install(project_name) -> str:
    """Return the sys.path entry holding ``<project_name>.egg-info``, else "".

    A present egg-info directory indicates an editable (development) install;
    this mirrors pip's detection but looks for egg-info instead of egg-link.
    """
    for entry in sys.path:
        if not os.path.isdir(entry):
            continue
        if os.path.isdir(os.path.join(entry, project_name + ".egg-info")):
            return entry
    return ""
_PROJECT_ROOT = os.path.dirname(os.path.dirname(_PACKAGE_ROOT))
PACKAGE_LIGHTNING = os.getenv("PACKAGE_LIGHTNING", None)
def get_dir_name(cwd=None) -> str:
    """Return the repository name derived from the GitHub origin remote URL.

    Raises:
        RuntimeError: If the origin remote is missing or not a GitHub URL.
    """
    remote_url = execute_git_command(["config", "--get", "remote.origin.url"], cwd=cwd)
    if not remote_url or "github.com" not in remote_url:
        raise RuntimeError("Only work with github repositories.")
    # e.g. "https://github.com/org/repo.git" -> "repo"
    return remote_url.split("/")[-1].split(".")[0]
def check_github_repository(cwd=None) -> bool:
    """Return True when the working directory's origin remote points at GitHub."""
    remote_url = execute_git_command(["config", "--get", "remote.origin.url"], cwd=cwd)
    return bool(remote_url) and "github.com" in remote_url
The provided code snippet includes necessary dependencies for implementing the `_prepare_lightning_wheels_and_requirements` function. Write a Python function `def _prepare_lightning_wheels_and_requirements(root: Path, package_name: str = "lightning") -> Optional[Callable]` to solve the following problem:
This function determines if lightning is installed in editable mode (for developers) and packages the current lightning source along with the app. For normal users who install via PyPI or Conda, this function does nothing.
Here is the function:
def _prepare_lightning_wheels_and_requirements(root: Path, package_name: str = "lightning") -> Optional[Callable]:
"""Package an editable-install lightning source tree alongside the app.

For normal users who installed via PyPI or Conda this is a no-op (returns
None). For developers running from source, it builds sdists for lightning
(and, when also editable, lightning-cloud / lightning-launcher), copies the
tarballs into ``root`` and returns a callable that cleans them up again.
"""
if not get_dist_path_if_editable_install(package_name):
return None
os.environ["PACKAGE_NAME"] = "app" if package_name == "lightning" + "_app" else "lightning"
# Packaging the Lightning codebase happens only inside the `lightning` repo.
git_dir_name = get_dir_name() if check_github_repository() else None
is_lightning = git_dir_name and git_dir_name == package_name
# PACKAGE_LIGHTNING env var can force packaging on ("1") or off ("0").
if (PACKAGE_LIGHTNING is None and not is_lightning) or PACKAGE_LIGHTNING == "0":
return None
download_frontend(_PROJECT_ROOT)
_prepare_wheel(_PROJECT_ROOT)
# todo: check why logging.info is missing in outputs
print(f"Packaged Lightning with your application. Version: {version}")
tar_name = _copy_tar(_PROJECT_ROOT, root)
tar_files = [os.path.join(root, tar_name)]
# Don't skip by default
if (PACKAGE_LIGHTNING or is_lightning) and not bool(int(os.getenv("SKIP_LIGHTING_UTILITY_WHEELS_BUILD", "0"))):
# building and copying lightning-cloud wheel if installed in editable mode
lightning_cloud_project_path = get_dist_path_if_editable_install("lightning_cloud")
if lightning_cloud_project_path:
from lightning_cloud.__version__ import __version__ as cloud_version
# todo: check why logging.info is missing in outputs
print(f"Packaged Lightning Cloud with your application. Version: {cloud_version}")
_prepare_wheel(lightning_cloud_project_path)
tar_name = _copy_tar(lightning_cloud_project_path, root)
tar_files.append(os.path.join(root, tar_name))
# Same treatment for an editable lightning-launcher checkout.
lightning_launcher_project_path = get_dist_path_if_editable_install("lightning_launcher")
if lightning_launcher_project_path:
from lightning_launcher.__version__ import __version__ as cloud_version
# todo: check why logging.info is missing in outputs
print(f"Packaged Lightning Launcher with your application. Version: {cloud_version}")
_prepare_wheel(lightning_launcher_project_path)
tar_name = _copy_tar(lightning_launcher_project_path, root)
tar_files.append(os.path.join(root, tar_name))
# Caller invokes this to delete the copied tarballs and the dist folder.
return functools.partial(_cleanup, *tar_files)
155,653 | import functools
import logging
import os
import pathlib
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.request
from pathlib import Path
from typing import Any, Callable, Optional
from packaging.version import Version
from lightning.app import _PROJECT_ROOT, _logger, _root_logger
from lightning.app import __version__ as version
from lightning.app.core.constants import FRONTEND_DIR, PACKAGE_LIGHTNING
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.git import check_github_repository, get_dir_name
def _enable_debugging():
"""Switch the root and module loggers to DEBUG when running a packaged app."""
tar_file = os.path.join(os.getcwd(), f"lightning-{version}.tar.gz")
# Debug mode is only enabled when the packaged source tarball is present in
# the working directory, i.e. when running a packaged app in the cloud.
if not os.path.exists(tar_file):
return
_root_logger.propagate = True
_logger.propagate = True
_root_logger.setLevel(logging.DEBUG)
_root_logger.debug("Setting debugging mode.")
_logger = logging.getLogger(__name__)
_logger.setLevel(logging.INFO)
The provided code snippet includes necessary dependencies for implementing the `enable_debugging` function. Write a Python function `def enable_debugging(func: Callable) -> Callable` to solve the following problem:
This function is used to transform any print into logger.info calls, so it gets tracked in the cloud.
Here is the function:
def enable_debugging(func: Callable) -> Callable:
    """Decorator that switches logging to DEBUG for the duration of ``func``.

    ``_enable_debugging`` is invoked before the call and the module logger is
    reset to INFO right after ``func`` returns.
    """

    @functools.wraps(func)
    def wrapped(*args: Any, **kwargs: Any) -> Any:
        _enable_debugging()
        result = func(*args, **kwargs)
        # Restore the default verbosity once the wrapped call is done.
        _logger.setLevel(logging.INFO)
        return result

    return wrapped
155,654 | import functools
import logging
import os
import pathlib
import shutil
import subprocess
import sys
import tarfile
import tempfile
import urllib.request
from pathlib import Path
from typing import Any, Callable, Optional
from packaging.version import Version
from lightning.app import _PROJECT_ROOT, _logger, _root_logger
from lightning.app import __version__ as version
from lightning.app.core.constants import FRONTEND_DIR, PACKAGE_LIGHTNING
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.git import check_github_repository, get_dir_name
def _fetch_latest_version(package_name: str) -> str:
"""Return the newest available version of ``package_name`` as reported by pip.

Asks pip to install the non-existent version 1000 and parses the candidate
list out of the resulting error text; falls back to the local ``version``
when no output can be read.
"""
args = [
sys.executable,
"-m",
"pip",
"install",
f"{package_name}==1000",
]
proc = subprocess.Popen(args, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, bufsize=0, close_fds=True)
if proc.stdout:
# NOTE(review): parsing pip's human-readable error output is fragile and can
# break across pip releases — confirm against the supported pip versions.
logs = " ".join([line.decode("utf-8") for line in iter(proc.stdout.readline, b"")])
return logs.split(")\n")[0].split(",")[-1].replace(" ", "")
return version
The provided code snippet includes necessary dependencies for implementing the `_verify_lightning_version` function. Write a Python function `def _verify_lightning_version()` to solve the following problem:
This function verifies that users are running the latest lightning version for the cloud.
Here is the function:
def _verify_lightning_version():
    """Ensure the locally installed ``lightning`` matches the latest release.

    Raises:
        Exception: When a newer version is available than the one installed.
    """
    # TODO (tchaton) Add support for windows
    if sys.platform == "win32":
        return
    lightning_latest_version = _fetch_latest_version("lightning")
    if Version(lightning_latest_version) <= Version(version):
        return
    raise Exception(
        f"You need to use the latest version of Lightning ({lightning_latest_version}) to run in the cloud. "
        "Please, run `pip install -U lightning`"
    )
155,655 | import logging
from typing import Tuple
import click
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.launcher.launcher import (
run_lightning_flow,
run_lightning_work,
serve_frontend,
start_application_server,
start_flow_and_servers,
)
The provided code snippet includes necessary dependencies for implementing the `launch` function. Write a Python function `def launch() -> None` to solve the following problem:
Launch your application.
Here is the function:
def launch() -> None:
"""Launch your application."""
# NOTE(review): intentionally empty — this appears to be the CLI group under
# which the subcommands below (run_server, run_flow, run_work, ...) are
# registered; the decorators are not visible in this chunk, so confirm before
# treating it as dead code.
155,656 | import logging
from typing import Tuple
import click
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.launcher.launcher import (
run_lightning_flow,
run_lightning_work,
serve_frontend,
start_application_server,
start_flow_and_servers,
)
logger = logging.getLogger(__name__)
def start_application_server(
entrypoint_file: str, host: str, port: int, queue_id: str, queues: Optional[FlowRestAPIQueues] = None
):
"""Load the app from ``entrypoint_file`` and serve its status/REST API on host:port.

When ``queues`` is given, those flow<->API queues are used instead of the
cloud queueing backend's publish/response queues.
"""
# NOTE(review): message says "Work" but this starts the app server — looks
# like a copy-paste from run_lightning_work; confirm before changing.
logger.debug(f"Run Lightning Work {entrypoint_file} {host} {port} {queue_id}")
queue_system = QueuingSystem(CLOUD_QUEUE_TYPE)
# Block until the cloud queueing backend is reachable.
wait_for_queues(queue_system)
kwargs = {
"api_delta_queue": queue_system.get_api_delta_queue(queue_id=queue_id),
}
# Note: Override the queues if provided
if isinstance(queues, Dict):
kwargs.update(queues)
else:
kwargs.update({
"api_publish_state_queue": queue_system.get_api_state_publish_queue(queue_id=queue_id),
"api_response_queue": queue_system.get_api_response_queue(queue_id=queue_id),
})
app = load_app_from_file(entrypoint_file)
from lightning.app.api.http_methods import _add_tags_to_api, _validate_api
from lightning.app.utilities.app_helpers import is_overridden
from lightning.app.utilities.commands.base import _commands_to_api, _prepare_commands
# Collect user-defined API routes and CLI commands from the root flow.
apis = []
if is_overridden("configure_api", app.root):
apis = app.root.configure_api()
_validate_api(apis)
_add_tags_to_api(apis, ["app_api"])
if is_overridden("configure_commands", app.root):
commands = _prepare_commands(app)
apis += _commands_to_api(commands)
start_server(
host=host,
port=port,
apis=apis,
**kwargs,
spec=extract_metadata_from_app(app),
)
The provided code snippet includes necessary dependencies for implementing the `run_server` function. Write a Python function `def run_server(file: str, queue_id: str, host: str, port: int) -> None` to solve the following problem:
It takes the application file as input, builds the application object, and then uses that to run the application server. This is used by the cloud runners to start the status server for the application.
Here is the function:
def run_server(file: str, queue_id: str, host: str, port: int) -> None:
"""Build the application object from ``file`` and run its status server.

Used by the cloud runners to start the status server for the application.
"""
logger.debug(f"Run Server: {file} {queue_id} {host} {port}")
# Thin CLI wrapper: all the work happens in start_application_server.
start_application_server(file, host, port, queue_id=queue_id)
155,657 | import logging
from typing import Tuple
import click
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.launcher.launcher import (
run_lightning_flow,
run_lightning_work,
serve_frontend,
start_application_server,
start_flow_and_servers,
)
logger = logging.getLogger(__name__)
def run_lightning_flow(entrypoint_file: str, queue_id: str, base_url: str, queues: Optional[FlowRestAPIQueues] = None):
"""Run the app's root flow in this process, with all works proxied through queues.

Bootstraps the cloud backend, the storage orchestrator and the SIGTERM
handler, then drives the flow loop until completion and exits the process
with 0 on success or 1 on failure.
"""
_set_flow_context()
logger.debug(f"Run Lightning Flow {entrypoint_file} {queue_id} {base_url}")
app = load_app_from_file(entrypoint_file)
app.backend = CloudBackend(entrypoint_file, queue_id=queue_id)
queue_system = app.backend.queues
app.backend.update_lightning_app_frontend(app)
# Block until the cloud queueing backend is reachable.
wait_for_queues(queue_system)
app.backend.resolve_url(app, base_url)
if app.root_path != "":
app._update_index_file()
app.backend._prepare_queues(app)
# Note: Override the queues if provided
if queues:
app.api_publish_state_queue = queues["api_publish_state_queue"]
app.api_response_queue = queues["api_response_queue"]
LightningFlow._attach_backend(app.root, app.backend)
app.should_publish_changes_to_api = True
storage_orchestrator = StorageOrchestrator(
app,
app.request_queues,
app.response_queues,
app.copy_request_queues,
app.copy_response_queues,
)
# NOTE(review): Thread.setDaemon is deprecated in favor of the `daemon`
# attribute — candidate for a follow-up cleanup.
storage_orchestrator.setDaemon(True)
storage_orchestrator.start()
# refresh the layout with the populated urls.
app._update_layout()
# register a signal handler to clean all works.
if sys.platform != "win32":
signal.signal(signal.SIGTERM, partial(_sigterm_flow_handler, app=app))
if "apis" in inspect.signature(start_server).parameters:
from lightning.app.utilities.commands.base import _prepare_commands
_prepare_commands(app)
# Once the bootstrapping is done, running the rank 0
# app with all the components inactive
try:
app._run()
except ExitAppException:
# Graceful, user-requested shutdown.
pass
except Exception:
app.stage = AppStage.FAILED
print(traceback.format_exc())
storage_orchestrator.join(0)
app.backend.stop_all_works(app.works)
exit_code = 1 if app.stage == AppStage.FAILED else 0
print(f"Finishing the App with exit_code: {str(exit_code)}...")
if not exit_code:
app.backend.stop_app(app)
sys.exit(exit_code)
The provided code snippet includes necessary dependencies for implementing the `run_flow` function. Write a Python function `def run_flow(file: str, queue_id: str, base_url: str) -> None` to solve the following problem:
It takes the application file as input, build the application object, proxy all the work components and then run the application flow defined in the root component. It does exactly what a singleprocess dispatcher would do but with proxied work components.
Here is the function:
def run_flow(file: str, queue_id: str, base_url: str) -> None:
"""Build the application object from ``file``, proxy its work components, and run
the root flow.

It does exactly what a single-process dispatcher would do, but with proxied
work components.
"""
logger.debug(f"Run Flow: {file} {queue_id} {base_url}")
# Thin CLI wrapper: all the work happens in run_lightning_flow.
run_lightning_flow(file, queue_id=queue_id, base_url=base_url)
155,658 | import logging
from typing import Tuple
import click
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.launcher.launcher import (
run_lightning_flow,
run_lightning_work,
serve_frontend,
start_application_server,
start_flow_and_servers,
)
logger = logging.getLogger(__name__)
def run_lightning_work(
file: str,
work_name: str,
queue_id: str,
):
"""This staticmethod runs the specified work in the current process.

It is organized under cloud runtime to indicate that it will be used by the cloud runner but otherwise, no cloud
specific logic is being implemented here.
"""
logger.debug(f"Run Lightning Work {file} {work_name} {queue_id}")
queues = QueuingSystem(CLOUD_QUEUE_TYPE)
# Block until the cloud queueing backend is reachable.
wait_for_queues(queues)
# Per-work queues used by the WorkRunner to talk to the flow/orchestrator.
caller_queue = queues.get_caller_queue(work_name=work_name, queue_id=queue_id)
readiness_queue = queues.get_readiness_queue(queue_id=queue_id)
delta_queue = queues.get_delta_queue(queue_id=queue_id)
error_queue = queues.get_error_queue(queue_id=queue_id)
request_queues = queues.get_orchestrator_request_queue(work_name=work_name, queue_id=queue_id)
response_queues = queues.get_orchestrator_response_queue(work_name=work_name, queue_id=queue_id)
copy_request_queues = queues.get_orchestrator_copy_request_queue(work_name=work_name, queue_id=queue_id)
copy_response_queues = queues.get_orchestrator_copy_response_queue(work_name=work_name, queue_id=queue_id)
# Run any app-level setup commands before importing the app module.
run_app_commands(file)
load_app_from_file(file)
# The flow ships the actual work object over its dedicated queue.
queue = queues.get_work_queue(work_name=work_name, queue_id=queue_id)
work = queue.get()
extras = {}
if hasattr(work, "_run_executor_cls"):
extras["run_executor_cls"] = work._run_executor_cls
# WorkRunner drives the work's run loop until completion.
WorkRunner(
work=work,
work_name=work_name,
caller_queue=caller_queue,
delta_queue=delta_queue,
readiness_queue=readiness_queue,
error_queue=error_queue,
request_queue=request_queues,
response_queue=response_queues,
copy_request_queue=copy_request_queues,
copy_response_queue=copy_response_queues,
**extras,
)()
The provided code snippet includes necessary dependencies for implementing the `run_work` function. Write a Python function `def run_work(file: str, work_name: str, queue_id: str) -> None` to solve the following problem:
Unlike other entrypoints, this command will take the file path or module details for a work component and run that by fetching the states from the queues.
Here is the function:
def run_work(file: str, work_name: str, queue_id: str) -> None:
"""Unlike other entrypoints, this command will take the file path or module details for a work component and run
that by fetching the states from the queues."""
logger.debug(f"Run Work: {file} {work_name} {queue_id}")
# Thin CLI wrapper: all the work happens in run_lightning_work.
run_lightning_work(
file=file,
work_name=work_name,
queue_id=queue_id,
)
155,659 | import logging
from typing import Tuple
import click
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.launcher.launcher import (
run_lightning_flow,
run_lightning_work,
serve_frontend,
start_application_server,
start_flow_and_servers,
)
logger = logging.getLogger(__name__)
def serve_frontend(file: str, flow_name: str, host: str, port: int):
"""Run the frontend attached to ``flow_name`` from the app defined in ``file``.

It is organized under cloud runtime to indicate that it will be used by the
cloud runner but otherwise, no cloud specific logic is implemented here.

Raises:
    ValueError: If no frontend is registered for ``flow_name``.
"""
_set_frontend_context()
logger.debug(f"Run Serve Frontend {file} {flow_name} {host} {port}")
app = load_app_from_file(file)
if flow_name not in app.frontends:
raise ValueError(f"Could not find frontend for flow with name {flow_name}.")
frontend = app.frontends[flow_name]
# Sanity check: the registered frontend must belong to the requested flow.
assert frontend.flow.name == flow_name
# Blocks serving the frontend until the process is terminated.
frontend.start_server(host, port)
The provided code snippet includes necessary dependencies for implementing the `run_frontend` function. Write a Python function `def run_frontend(file: str, flow_name: str, host: str, port: int) -> None` to solve the following problem:
Serve the frontend specified by the given flow.
Here is the function:
def run_frontend(file: str, flow_name: str, host: str, port: int) -> None:
"""Serve the frontend specified by the given flow."""
logger.debug(f"Run Frontend: {file} {flow_name} {host}")
# Thin CLI wrapper: all the work happens in serve_frontend.
serve_frontend(file=file, flow_name=flow_name, host=host, port=port)
155,660 | import logging
from typing import Tuple
import click
from lightning.app.core.constants import APP_SERVER_HOST, APP_SERVER_PORT
from lightning.app.launcher.launcher import (
run_lightning_flow,
run_lightning_work,
serve_frontend,
start_application_server,
start_flow_and_servers,
)
logger = logging.getLogger(__name__)
def start_flow_and_servers(
entrypoint_file: str,
base_url: str,
queue_id: str,
host: str,
port: int,
flow_names_and_ports: Tuple[Tuple[str, int]],
):
"""Start the flow, the app REST server and every flow frontend as subprocesses.

When ``flow_names_and_ports`` is empty, the frontends are discovered from
the app itself. Blocks managing the child processes until shutdown.
"""
processes: List[Tuple[str, Process]] = []
# Queues between Flow and its Rest API are using multiprocessing to:
# - reduce redis load
# - increase UI responsiveness and RPS
queue_system = QueuingSystem.MULTIPROCESS
queues = {
"api_publish_state_queue": queue_system.get_api_state_publish_queue(queue_id=queue_id),
"api_response_queue": queue_system.get_api_response_queue(queue_id=queue_id),
}
# In order to avoid running this function 3 separate times while executing the
# `run_lightning_flow`, `start_application_server`, & `serve_frontend` functions
# in a subprocess we extract this to the top level. If we intend to make changes
# to be able to start these components in separate containers, the implementation
# will have to move a call to this function within the initialization process.
run_app_commands(entrypoint_file)
flow_process = start_server_in_process(
run_lightning_flow,
args=(
entrypoint_file,
queue_id,
base_url,
),
kwargs={"queues": queues},
)
processes.append(("Flow", flow_process))
server_process = start_server_in_process(
target=start_application_server,
args=(
entrypoint_file,
host,
port,
queue_id,
),
kwargs={"queues": queues},
)
processes.append(("Server", server_process))
# Fall back to discovering the frontends directly from the app definition.
if not flow_names_and_ports:
flow_names_and_ports = _get_frontends_from_app(entrypoint_file)
for name, fe_port in flow_names_and_ports:
frontend_process = start_server_in_process(target=serve_frontend, args=(entrypoint_file, name, host, fe_port))
processes.append((name, frontend_process))
# Supervise the children; returns only when the app shuts down.
manage_server_processes(processes)
The provided code snippet includes necessary dependencies for implementing the `run_flow_and_servers` function. Write a Python function `def run_flow_and_servers( file: str, base_url: str, queue_id: str, host: str, port: int, flow_port: Tuple[Tuple[str, int]], ) -> None` to solve the following problem:
It takes the application file as input, build the application object and then use that to run the application flow defined in the root component, the application server and all the flow frontends. This is used by the cloud runners to start the flow, the status server and all frontends for the application
Here is the function:
def run_flow_and_servers(
file: str,
base_url: str,
queue_id: str,
host: str,
port: int,
flow_port: Tuple[Tuple[str, int]],
) -> None:
"""Build the application object from ``file`` and run its root flow, the
application server, and all flow frontends.

This is used by the cloud runners to start the flow, the status server and
all frontends for the application.
"""
logger.debug(f"Run Flow: {file} {queue_id} {base_url}")
logger.debug(f"Run Server: {file} {queue_id} {host} {port}.")
logger.debug(f"Run Frontend's: {flow_port}")
# Thin CLI wrapper: all the work happens in start_flow_and_servers.
start_flow_and_servers(
entrypoint_file=file,
base_url=base_url,
queue_id=queue_id,
host=host,
port=port,
flow_names_and_ports=flow_port,
)
155,661 | import contextlib
import os
import click
import lightning_cloud
import rich
from lightning.app.cli.commands.ls import _add_colors, _get_prefix
from lightning.app.cli.commands.pwd import _pwd
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import _error_and_exit
from lightning.app.utilities.network import LightningClient
def _add_colors(filename: str, color: Optional[str] = None) -> str:
"""Wrap a label in rich color markup tags."""
# NOTE(review): `filename` is unused and the literal is "(unknown)" — this
# looks like a truncated/redacted body; confirm against the original source.
return f"[{color}](unknown)[/{color}]"
def _get_prefix(prefix: str, lit_resource) -> str:
    """Namespace ``prefix`` under the artifact path of the owning resource."""
    if isinstance(lit_resource, Externalv1LightningappInstance):
        resource_path = f"lightningapps/{lit_resource.id}"
    else:
        resource_path = f"cloudspaces/{lit_resource.id}"
    return _add_resource_prefix(prefix, resource_path)
def _pwd() -> str:
    """Return the current working directory on the Lightning Cloud filesystem.

    On first use the CD file is created and initialized with the root "/".
    """
    root = "/"
    if os.path.exists(_CD_FILE):
        with open(_CD_FILE) as f:
            # The first line of the CD file holds the current directory.
            root = f.readlines()[0].replace("\n", "")
    else:
        with open(_CD_FILE, "w") as f:
            f.write(root + "\n")
    return root
def _error_and_exit(msg: str) -> None:
"""Print an error message in red and terminate the process."""
rich.print(f"[red]ERROR[/red]: {msg}")
# NOTE(review): exits with status 0 even though this is an error path — confirm
# whether callers/scripts rely on that before changing it to a non-zero code.
sys.exit(0)
The provided code snippet includes necessary dependencies for implementing the `rm` function. Write a Python function `def rm(rm_path: str, r: bool = False, recursive: bool = False) -> None` to solve the following problem:
Delete files on the Lightning Cloud filesystem.
Here is the function:
def rm(rm_path: str, r: bool = False, recursive: bool = False) -> None:
    """Delete files on the Lightning Cloud filesystem.

    Args:
        rm_path: Path to delete, resolved against the current remote directory.
        r: Accepted for CLI compatibility; unused in the body.
        recursive: Accepted for CLI compatibility; unused in the body.
    """
    root = _pwd()

    # Refuse deletions that would target or escape the current directory.
    if rm_path in (".", ".."):
        return _error_and_exit('rm "." and ".." may not be removed')
    if ".." in rm_path:
        return _error_and_exit('rm ".." or higher may not be removed')

    root = os.path.join(root, rm_path)
    splits = [split for split in root.split("/") if split != ""]

    # The root and bare project paths cannot be deleted.
    if root == "/" or len(splits) == 1:
        return _error_and_exit("rm at the project level isn't supported")

    client = LightningClient(retry=False)
    projects = client.projects_service_list_memberships()
    project = [project for project in projects.memberships if project.name == splits[0]]

    # This happens if the user changes cluster and the project doesn't exist.
    if len(project) == 0:
        return _error_and_exit(
            f"There isn't any Lightning Project matching the name {splits[0]}." " HINT: Use `lightning cd`."
        )

    project_id = project[0].project_id

    # Parallelise the two listing calls via async requests.
    lit_apps = client.lightningapp_instance_service_list_lightningapp_instances(project_id=project_id, async_req=True)
    lit_cloud_spaces = client.cloud_space_service_list_cloud_spaces(project_id=project_id, async_req=True)

    lit_apps = lit_apps.get().lightningapps
    lit_cloud_spaces = lit_cloud_spaces.get().cloudspaces

    # Resolve the named resource: cloudspaces are checked before apps.
    lit_resources = [lit_resource for lit_resource in lit_cloud_spaces if lit_resource.name == splits[1]]

    if len(lit_resources) == 0:
        lit_resources = [lit_resource for lit_resource in lit_apps if lit_resource.name == splits[1]]

        if len(lit_resources) == 0:
            # Typo fix: was "Ressource".
            _error_and_exit(f"There isn't any Lightning Resource matching the name {splits[1]}.")

    lit_resource = lit_resources[0]

    prefix = "/".join(splits[2:])
    prefix = _get_prefix(prefix, lit_resource)

    # The artifact may live on any cluster bound to the project; try each in
    # turn, suppressing API errors so the next cluster gets a chance.
    clusters = client.projects_service_list_project_cluster_bindings(project_id)
    succeeded = False
    for cluster in clusters.clusters:
        with contextlib.suppress(lightning_cloud.openapi.rest.ApiException):
            client.lightningapp_instance_service_delete_project_artifact(
                project_id=project_id,
                cluster_id=cluster.cluster_id,
                filename=prefix,
            )
            # Only reached when the delete call did not raise.
            succeeded = True
            break

    prefix = os.path.join(*splits)

    if succeeded:
        # Typo fix: was "Successfuly".
        rich.print(_add_colors(f"Successfully deleted `{prefix}`.", color="green"))
        return None

    return _error_and_exit(f"No file or folder named `{prefix}` was found.")
155,662 | import os
import sys
from typing import Dict, Optional
import requests
from lightning.app.cli.connect.app import (
_clean_lightning_connection,
_install_missing_requirements,
_resolve_command_path,
)
from lightning.app.utilities.cli_helpers import _LightningAppOpenAPIRetriever
from lightning.app.utilities.commands.base import _download_command
from lightning.app.utilities.enum import OpenAPITags
def _is_running_help(argv) -> bool:
return argv[-1] in ["--help", "-"] if argv else False
def _handle_command_without_client(command: str, metadata: Dict, url: str) -> None:
    """Execute an app command by POSTing directly to the app's REST endpoint.

    Args:
        command: The command name, with words joined by underscores.
        metadata: The command's openapi metadata; ``metadata["parameters"]``
            lists the supported argument names.
        url: Base URL of the running app.
    """
    supported_params = list(metadata["parameters"])
    # Help mode: print usage and return without contacting the app.
    if _is_running_help(sys.argv):
        print(f"Usage: lightning_app {command} [ARGS]...")
        print(" ")
        print("Options")
        for param in supported_params:
            print(f" {param}: Add description")
        return
    # Everything after the command's words on the CLI is treated as an argument.
    provided_params = [param.replace("--", "") for param in sys.argv[1 + len(command.split("_")) :]]
    # TODO: Add support for more argument types.
    if any("=" not in param for param in provided_params):
        raise Exception("Please, use --x=y syntax when providing the command arguments.")
    if any(param.split("=")[0] not in supported_params for param in provided_params):
        raise Exception(f"Some arguments need to be provided. The keys are {supported_params}.")
    # TODO: Encode the parameters and validate their type.
    query_parameters = "&".join(provided_params)
    resp = requests.post(url + f"/command/{command}?{query_parameters}")
    assert resp.status_code == 200, resp.json()
    print(resp.json())
def _handle_command_with_client(command: str, metadata: Dict, app_name: str, app_id: Optional[str], url: str):
    """Download the command's client implementation and run it locally.

    Args:
        command: The command name, with words joined by underscores.
        metadata: The command's openapi metadata (``cls_path`` / ``cls_name``).
        app_name: Name of the connected app, or ``"localhost"`` for a local app.
        app_id: Cloud app id, if any.
        url: Base URL of the running app.
    """
    debug_mode = bool(int(os.getenv("DEBUG", "0")))

    if app_name == "localhost":
        target_file = metadata["cls_path"]
    else:
        # Fix: the original conditional had identical branches
        # (`_resolve_command_path(command) if debug_mode else _resolve_command_path(command)`),
        # so the debug flag was irrelevant here.
        target_file = _resolve_command_path(command)

    if debug_mode:
        print(target_file)

    client_command = _download_command(
        command,
        metadata["cls_path"],
        metadata["cls_name"],
        app_id,
        debug_mode=debug_mode,
        # In debug mode the (possibly local) target file is reused; otherwise
        # the command path is resolved into the connection cache.
        target_file=target_file if debug_mode else _resolve_command_path(command),
    )
    client_command._setup(command_name=command, app_url=url)
    # Strip the command words so the client sees only its own arguments.
    sys.argv = sys.argv[len(command.split("_")) :]
    client_command.run()
def _install_missing_requirements(
    retriever: _LightningAppOpenAPIRetriever,
    fail_if_missing: bool = False,
):
    """Install (or report) requirements declared by the app's client commands.

    Collects the ``requirements`` lists of all ``APP_CLIENT_COMMAND`` entries
    and pip-installs any package that isn't importable locally.

    Args:
        retriever: Provides ``api_commands`` metadata from the app's openapi spec.
        fail_if_missing: When True, print the missing packages and exit instead
            of installing them.
    """
    requirements = set()
    for metadata in retriever.api_commands.values():
        if metadata["tag"] == OpenAPITags.APP_CLIENT_COMMAND:
            for req in metadata.get("requirements", []) or []:
                requirements.add(req)
    if requirements:
        missing_requirements = []
        for req in requirements:
            # Try both dash and underscore spellings of the distribution name.
            if not (package_available(req) or package_available(req.replace("-", "_"))):
                missing_requirements.append(req)
        if missing_requirements:
            if fail_if_missing:
                missing_requirements = " ".join(missing_requirements)
                print(f"The command failed as you are missing the following requirements: `{missing_requirements}`.")
                sys.exit(0)
            for req in missing_requirements:
                # Install quietly, capturing pip output in a temporary log file.
                std_out_out = get_logfile("output.log")
                with open(std_out_out, "wb") as stdout:
                    Popen(
                        f"{sys.executable} -m pip install {req}",
                        shell=True,
                        stdout=stdout,
                        stderr=stdout,
                    ).wait()
                os.remove(std_out_out)
def _clean_lightning_connection():
    """Remove connection folders whose owning process is no longer alive."""
    if not os.path.exists(_LIGHTNING_CONNECTION):
        return
    for ppid in os.listdir(_LIGHTNING_CONNECTION):
        try:
            # Raises when the pid is gone, or the folder name isn't a pid.
            psutil.Process(int(ppid))
            continue
        except (psutil.NoSuchProcess, ValueError):
            pass
        connection = os.path.join(_LIGHTNING_CONNECTION, str(ppid))
        if os.path.exists(connection):
            shutil.rmtree(connection)
class _LightningAppOpenAPIRetriever:
    """Resolves a running Lightning App (local or cloud) and fetches its ``openapi.json`` spec."""

    def __init__(
        self,
        app_id_or_name_or_url: Optional[str],
        use_cache: bool = False,
    ):
        """This class encapsulates the logic to collect the openapi.json file from the app to use the CLI Commands.

        Arguments:
            app_id_or_name_or_url: An identified for the app.
            use_cache: Whether to load the openapi spec from the cache.

        """
        self.app_id_or_name_or_url = app_id_or_name_or_url
        self.url = None
        self.openapi = None
        self.api_commands = None
        self.app_id = None
        self.app_name = None
        home = os.path.expanduser("~")
        if use_cache:
            cache_openapi = os.path.join(home, ".lightning", "lightning_connection", "commands", "openapi.json")
            if os.path.exists(cache_openapi):
                with open(cache_openapi) as f:
                    self.openapi = json.load(f)
                self.api_commands = _extract_command_from_openapi(self.openapi)
        # Cache miss (or cache disabled): retrieve the spec from the app itself.
        if not self.api_commands:
            self._collect_open_api_json()
            if self.openapi:
                self.api_commands = _extract_command_from_openapi(self.openapi)

    def is_alive(self) -> bool:
        """Returns whether the Lightning App Rest API is available."""
        if self.url is None:
            self._maybe_find_url()
        if self.url is None:
            return False
        resp = requests.get(self.url)
        return resp.status_code == 200

    def _maybe_find_url(self):
        """Tries to resolve the app url from the provided `app_id_or_name_or_url`."""
        if _is_url(self.app_id_or_name_or_url):
            self.url = self.app_id_or_name_or_url
            assert self.url
            return

        if self.app_id_or_name_or_url is None:
            url = f"http://localhost:{APP_SERVER_PORT}"
            # Bug fix: probe the local candidate ``url`` — ``self.url`` is
            # still None on this path and the original request was malformed.
            resp = requests.get(f"{url}/openapi.json")
            if resp.status_code == 200:
                self.url = url
                return

        app = self._maybe_find_matching_cloud_app()
        if app:
            self.url = app.status.url

    def _maybe_find_matching_cloud_app(self):
        """Tries to resolve the app url from the provided `app_id_or_name_or_url`."""
        client = LightningClient(retry=False)
        project = _get_project(client)
        list_apps = client.lightningapp_instance_service_list_lightningapp_instances(project_id=project.project_id)

        app_names = [_get_app_display_name(lit_app) for lit_app in list_apps.lightningapps]

        if not self.app_id_or_name_or_url:
            print(f"ERROR: Provide an application name, id or url with --app_id=X. Found {app_names}")
            sys.exit(0)

        # Match by id or display name; a running app must also expose a url.
        for app in list_apps.lightningapps:
            if app.id == self.app_id_or_name_or_url or _get_app_display_name(app) == self.app_id_or_name_or_url:
                if app.status.url == "":
                    print("The application is starting. Try in a few moments.")
                    sys.exit(0)
                return app
        return None

    def _collect_open_api_json(self):
        """This function is used to retrieve the current url associated with an id."""
        # 1: The identifier is already a url — fetch the spec directly.
        if _is_url(self.app_id_or_name_or_url):
            self.url = self.app_id_or_name_or_url
            assert self.url
            resp = requests.get(self.url + "/openapi.json")
            if resp.status_code != 200:
                print(f"ERROR: The server didn't process the request properly. Found {resp.json()}")
                sys.exit(0)
            self.openapi = resp.json()
            return

        # 2: If no identifier has been provided, evaluate the local application
        if self.app_id_or_name_or_url is None:
            with contextlib.suppress(requests.exceptions.ConnectionError):
                self.url = f"http://localhost:{APP_SERVER_PORT}"
                resp = requests.get(f"{self.url}/openapi.json")
                if resp.status_code != 200:
                    raise Exception(f"The server didn't process the request properly. Found {resp.json()}")
                self.openapi = resp.json()

        # 3: If an identified was provided or the local evaluation has failed, evaluate the cloud.
        else:
            app = self._maybe_find_matching_cloud_app()
            if app:
                if app.status.url == "":
                    raise Exception("The application is starting. Try in a few moments.")
                resp = requests.get(app.status.url + "/openapi.json")
                if resp.status_code != 200:
                    raise Exception(
                        "The server didn't process the request properly. " "Try once your application is ready."
                    )
                self.url = app.status.url
                self.openapi = resp.json()
                self.app_id = app.id
                self.app_name = _get_app_display_name(app)
class OpenAPITags:
    # Tag values used in the app's openapi.json to classify exposed endpoints.
    APP_CLIENT_COMMAND = "app_client_command"  # command run via a downloaded client
    APP_COMMAND = "app_command"  # command run directly against the app server
    APP_API = "app_api"  # plain REST API endpoint
The provided code snippet includes necessary dependencies for implementing the `_run_app_command` function. Write a Python function `def _run_app_command(app_name: str, app_id: Optional[str])` to solve the following problem:
Execute a function in a running App from its name.
Here is the function:
def _run_app_command(app_name: str, app_id: Optional[str]):
    """Execute a function in a running App from its name.

    Args:
        app_name: Name of the connected app, or ``"localhost"`` for a local app.
        app_id: Cloud app id (or name/url) used to locate the running app.
    """
    # 1: Collect the url and comments from the running application
    _clean_lightning_connection()

    running_help = _is_running_help(sys.argv)

    # In help mode the cached openapi spec is enough; no live app is required.
    retriever = _LightningAppOpenAPIRetriever(app_id, use_cache=running_help)

    if not running_help and (retriever.url is None or retriever.api_commands is None):
        if app_name == "localhost":
            print("The command couldn't be executed as your local Lightning App isn't running.")
        else:
            print(f"The command couldn't be executed as your cloud Lightning App `{app_name}` isn't running.")
        sys.exit(0)

    if not retriever.api_commands:
        raise Exception("This application doesn't expose any commands yet.")

    # Match the CLI words (joined with "_") against the registered commands.
    full_command = "_".join(sys.argv)

    has_found = False
    for command in list(retriever.api_commands):
        if command in full_command:
            has_found = True
            # Reject underscore spelling: commands must be typed with spaces.
            for value in sys.argv:
                if value == command and "_" in value:
                    print(
                        f"The command `{value}` was provided with an underscore and it isn't allowed."
                        f"Instead, use `lightning_app {value.replace('_', ' ')}`."
                    )
                    sys.exit(0)
            break

    if not has_found:
        raise Exception(f"The provided command isn't available in {list(retriever.api_commands)}")

    # 2: Send the command from the user
    metadata = retriever.api_commands[command]

    try:
        # 3: Execute the command
        if metadata["tag"] == OpenAPITags.APP_COMMAND:
            _handle_command_without_client(command, metadata, retriever.url)
        else:
            _handle_command_with_client(command, metadata, app_name, app_id, retriever.url)
    except ModuleNotFoundError:
        # A client command may import packages not installed locally yet.
        _install_missing_requirements(retriever, fail_if_missing=True)

    if running_help:
        print("Your command execution was successful.")
155,663 | from typing import List
import click
import rich
from rich.color import ANSI_COLOR_NAMES
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.app_logs import _app_logs_reader
from lightning.app.utilities.cloud import _get_project
from lightning.app.utilities.logs_socket_api import _LightningLogsSocketAPI
from lightning.app.utilities.network import LightningClient
def _show_logs(app_name: str, components: List[str], follow: bool) -> None:
    """Stream and pretty-print cloud app logs for the selected components.

    Args:
        app_name: Display name of the cloud app.
        components: Component names to include; empty means all components.
        follow: Whether to keep streaming new log lines.

    Raises:
        click.ClickException: If no apps exist, the name is missing/unknown,
            or a requested component does not exist.
    """
    client = LightningClient(retry=False)
    project = _get_project(client)

    # Map display name (falling back to internal name) -> app instance.
    apps = {
        getattr(app, "display_name", None) or app.name: app
        for app in client.lightningapp_instance_service_list_lightningapp_instances(
            project_id=project.project_id
        ).lightningapps
    }

    if not apps:
        raise click.ClickException(
            "You don't have any application in the cloud. Please, run an application first with `--cloud`."
        )

    if not app_name:
        raise click.ClickException(
            f"You have not specified any Lightning App. Please select one of the following: [{', '.join(apps.keys())}]."
        )

    if app_name not in apps:
        raise click.ClickException(
            f"The Lightning App '{app_name}' does not exist. "
            f"Please select one of the following: [{', '.join(apps.keys())}]."
        )

    # Fetch all lightning works from given application
    # 'Flow' component is somewhat implicit, only one for whole app,
    # and not listed in lightningwork API - so we add it directly to the list
    works = client.lightningwork_service_list_lightningwork(
        project_id=project.project_id, app_id=apps[app_name].id
    ).lightningworks
    app_component_names = ["flow"] + [f.name for f in apps[app_name].spec.flow_servers] + [w.name for w in works]

    if not components:
        components = app_component_names

    else:

        def add_prefix(c: str) -> str:
            # User-supplied names may omit the canonical "root." prefix.
            if c == "flow":
                return c
            if not c.startswith("root."):
                return "root." + c
            return c

        components = [add_prefix(c) for c in components]

        for component in components:
            if component not in app_component_names:
                raise click.ClickException(f"Component '{component}' does not exist in app {app_name}.")

    log_reader = _app_logs_reader(
        logs_api_client=_LightningLogsSocketAPI(client.api_client),
        project_id=project.project_id,
        app_id=apps[app_name].id,
        component_names=components,
        follow=follow,
    )

    # Assign each component a distinct rich color for readable output.
    rich_colors = list(ANSI_COLOR_NAMES)
    colors = {c: rich_colors[i + 1] for i, c in enumerate(components)}

    for log_event in log_reader:
        date = log_event.timestamp.strftime("%m/%d/%Y %H:%M:%S")
        color = colors[log_event.component_name]
        rich.print(f"[{color}]{log_event.component_name}[/{color}] {date} {log_event.message}")
The provided code snippet includes necessary dependencies for implementing the `logs` function. Write a Python function `def logs(app_name: str, components: List[str], follow: bool) -> None` to solve the following problem:
Show cloud application logs. By default, prints logs for all currently available components. Example uses: Print all application logs: $ lightning show logs my-application Print logs only from the flow (no work): $ lightning show logs my-application flow Print logs only from selected works: $ lightning show logs my-application root.work_a root.work_b
Here is the function:
def logs(app_name: str, components: List[str], follow: bool) -> None:
    """Show cloud application logs. By default, prints logs for all currently available components.

    Example uses:

        Print all application logs:

            $ lightning show logs my-application

        Print logs only from the flow (no work):

            $ lightning show logs my-application flow

        Print logs only from selected works:

            $ lightning show logs my-application root.work_a root.work_b

    Args:
        app_name: Display name of the cloud app.
        components: Component names to include; empty means all components.
        follow: Whether to keep streaming new log lines.
    """
    _show_logs(app_name, components, follow)
155,664 | import concurrent
import contextlib
import os
import sys
from functools import partial
from multiprocessing.pool import ApplyResult
from pathlib import Path
from textwrap import dedent
from typing import Any, Optional, Tuple, Union
import click
import requests
import urllib3
from lightning_cloud.openapi import (
Externalv1Cluster,
Externalv1LightningappInstance,
ProjectIdStorageBody,
V1CloudSpace,
)
from rich.live import Live
from rich.progress import BarColumn, DownloadColumn, Progress, TaskID, TextColumn
from rich.spinner import Spinner
from rich.text import Text
from lightning.app.cli.commands.ls import _collect_artifacts, _get_prefix
from lightning.app.cli.commands.pwd import _pwd
from lightning.app.source_code import FileUploader
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.auth import _AuthTokenGetter
from lightning.app.utilities.cli_helpers import _error_and_exit
from lightning.app.utilities.network import LightningClient
def _upload_files(live, client: LightningClient, local_src: str, remote_dst: str, pwd: str) -> str:
    """Upload a local file or directory tree to the Lightning Cloud filesystem.

    Args:
        live: The active rich ``Live`` display (stopped before progress output).
        client: Authenticated Lightning API client.
        local_src: Local file or directory to upload.
        remote_dst: Destination path on the remote filesystem.
        pwd: Current remote working directory, used to resolve the project/resource.
    """
    remote_splits = [split for split in remote_dst.split("/") if split != ""]
    remote_dst = os.path.join(*remote_splits)

    if not os.path.exists(local_src):
        return _error_and_exit(f"The provided source path {local_src} doesn't exist.")

    lit_resource = None
    # A single-segment destination targets a project; deeper paths target a
    # resource (app or cloudspace) within it.
    if len(remote_splits) > 1:
        project_id, lit_resource = _get_project_id_and_resource(pwd)
    else:
        project_id = _get_project_id_from_name(remote_dst)

    if len(remote_splits) > 2:
        remote_dst = os.path.join(*remote_splits[2:])

    local_src = Path(local_src).resolve()
    upload_paths = []

    # Directories are walked recursively; single files upload as-is.
    if os.path.isdir(local_src):
        for root_dir, _, paths in os.walk(local_src):
            for path in paths:
                upload_paths.append(os.path.join(root_dir, path))
    else:
        upload_paths = [local_src]

    _upload_urls = []

    clusters = client.projects_service_list_project_cluster_bindings(project_id)

    live.stop()

    # Request an upload url per (file, cluster) pair, asynchronously.
    for upload_path in upload_paths:
        for cluster in clusters.clusters:
            # NOTE(review): the remote filename is derived from the path
            # relative to the current working directory — presumably uploads
            # are expected to originate from under cwd; confirm with callers.
            filename = str(upload_path).replace(str(os.getcwd()), "")[1:]
            filename = _get_prefix(os.path.join(remote_dst, filename), lit_resource) if lit_resource else "/" + filename
            response = client.lightningapp_instance_service_upload_project_artifact(
                project_id=project_id,
                body=ProjectIdStorageBody(cluster_id=cluster.cluster_id, filename=filename),
                async_req=True,
            )
            _upload_urls.append(response)

    upload_urls = []

    for upload_url in _upload_urls:
        upload_urls.extend(upload_url.get().urls)

    live.stop()

    if not upload_paths:
        print("There were no files to upload.")
        return None

    progress = _get_progress_bar()

    # Total is averaged over clusters since each file is counted once per cluster.
    total_size = sum([Path(path).stat().st_size for path in upload_paths]) // max(len(clusters.clusters), 1)
    task_id = progress.add_task("upload", filename="", total=total_size)

    progress.start()

    _upload_partial = partial(_upload, progress=progress, task_id=task_id)

    with concurrent.futures.ThreadPoolExecutor(4) as executor:
        results = executor.map(_upload_partial, upload_paths, upload_urls)

    progress.stop()

    # Raise the first exception found
    exception = next((e for e in results if isinstance(e, Exception)), None)
    if exception:
        _error_and_exit("We detected errors in uploading your files.")
        return None

    return None
def _zip_files(live: Live, remote_src: str, local_dst: str) -> None:
    """Download a remote artifact subtree as a single zip archive.

    Args:
        live: The active rich ``Live`` display (stopped before progress output).
        remote_src: Remote source path; must be at least two levels deep.
        local_dst: Local destination file or directory for the archive.
    """
    if len(remote_src.split("/")) < 3:
        return _error_and_exit(
            dedent(
                f"""
            The source path must be at least two levels deep (e.g. r:/my-project/my-lit-resource).

            The path provided was: r:{remote_src}
            """
            )
        )

    # When the destination is a directory, name the archive after the source.
    if os.path.isdir(local_dst):
        local_dst = os.path.join(local_dst, os.path.basename(remote_src) + ".zip")

    project_id, lit_resource = _get_project_id_and_resource(remote_src)

    # /my-project/my-lit-resource/artfact-path -> cloudspace/my-lit-resource-id/artifact-path
    artifact = "/".join(remote_src.split("/")[3:])
    prefix = _get_prefix(artifact, lit_resource)

    # The download endpoint requires an API token passed as a query parameter.
    token = _AuthTokenGetter(LightningClient().api_client)._get_api_token()
    endpoint = f"/v1/projects/{project_id}/artifacts/download?prefix={prefix}&token={token}"
    cluster = _cluster_from_lit_resource(lit_resource)
    url = _storage_host(cluster) + endpoint

    live.stop()
    progress = _get_progress_bar(transient=True)
    progress.start()
    task_id = progress.add_task("download zip", total=None)

    _download_file(local_dst, url, progress, task_id)

    progress.stop()

    click.echo(f"Downloaded to {local_dst}")

    return None
def _download_files(live, client, remote_src: str, local_dst: str, pwd: str):
    """Download remote artifacts under the current directory to a local path.

    Args:
        live: The active rich ``Live`` display (stopped before progress output).
        client: Authenticated Lightning API client.
        remote_src: Remote source prefix stripped from each artifact filename.
        local_dst: Local destination directory.
        pwd: Current remote working directory, used to resolve the project/resource.
    """
    project_id, lit_resource = _get_project_id_and_resource(pwd)

    download_paths = []
    download_urls = []
    total_size = []

    prefix = _get_prefix("/".join(pwd.split("/")[3:]), lit_resource) + "/"

    # Collect every artifact under the prefix together with its download url.
    for artifact in _collect_artifacts(client, project_id, prefix, include_download_url=True):
        path = os.path.join(local_dst, artifact.filename.replace(remote_src, ""))
        path = Path(path).resolve()
        os.makedirs(path.parent, exist_ok=True)
        download_paths.append(path)
        download_urls.append(artifact.url)
        total_size.append(int(artifact.size_bytes))

    live.stop()

    if not download_paths:
        print("There were no files to download.")
        return

    # Fix: the original had a duplicated assignment (`progress = progress = ...`).
    progress = _get_progress_bar()
    progress.start()

    task_id = progress.add_task("download", filename="", total=sum(total_size))

    _download_file_fn = partial(_download_file, progress=progress, task_id=task_id)

    with concurrent.futures.ThreadPoolExecutor(4) as executor:
        results = executor.map(_download_file_fn, download_paths, download_urls)

    progress.stop()

    # Raise the first exception found
    exception = next((e for e in results if isinstance(e, Exception)), None)
    if exception:
        _error_and_exit("There was an error downloading your files.")
def _sanitize_path(path: str, pwd: str) -> Tuple[str, bool]:
    """Resolve ``path`` against ``pwd`` and report whether it carried the remote prefix."""
    is_remote = _is_remote(path)
    if is_remote:
        # Strip the "r:"-style marker before resolving.
        path = _remove_remote(path)
    resolved = pwd if path == "." else os.path.join(pwd, path)
    return resolved, is_remote
def _pwd() -> str:
    """Return the current remote working directory recorded in the CD state file."""
    if not os.path.exists(_CD_FILE):
        # First use: seed the state file with the root directory.
        with open(_CD_FILE, "w") as f:
            f.write("/\n")
        return "/"
    with open(_CD_FILE) as f:
        # Only the first line of the file is meaningful.
        return f.readlines()[0].replace("\n", "")
def _error_and_exit(msg: str) -> None:
    """Print ``msg`` as a red ERROR line and terminate the process.

    NOTE(review): exits with status 0 even on this error path — confirm
    scripts do not depend on a non-zero exit code.
    """
    rich.print(f"[red]ERROR[/red]: {msg}")
    sys.exit(0)
The provided code snippet includes necessary dependencies for implementing the `cp` function. Write a Python function `def cp(src_path: str, dst_path: str, r: bool = False, recursive: bool = False, zip: bool = False) -> None` to solve the following problem:
Copy files between your local filesystem and the Lightning Cloud filesystem.
Here is the function:
def cp(src_path: str, dst_path: str, r: bool = False, recursive: bool = False, zip: bool = False) -> None:
    """Copy files between your local filesystem and the Lightning Cloud filesystem.

    Args:
        src_path: Source path; a remote marker determines the transfer direction.
        dst_path: Destination path.
        r: Accepted for CLI compatibility; unused in the body.
        recursive: Accepted for CLI compatibility; unused in the body.
        zip: Download the remote source as a single zip archive
            (only supported for remote -> local transfers).
    """
    if sys.platform == "win32":
        print("`cp` isn't supported on windows. Open an issue on Github.")
        sys.exit(0)

    with Live(Spinner("point", text=Text("pending...", style="white")), transient=True) as live:
        pwd = _pwd()

        client = LightningClient(retry=False)

        # Each side is resolved against the remote cwd and flagged remote/local.
        src_path, src_remote = _sanitize_path(src_path, pwd)
        dst_path, dst_remote = _sanitize_path(dst_path, pwd)

        # remote -> remote: unsupported.
        if src_remote and dst_remote:
            return _error_and_exit("Moving files remotely isn't supported yet. Please, open a Github issue.")

        # local -> remote: upload.
        if not src_remote and dst_remote:
            if dst_path == "/" or len(dst_path.split("/")) == 1:
                return _error_and_exit("Uploading files at the project level isn't allowed yet.")
            if zip:
                return _error_and_exit("Zipping uploads isn't supported yet. Please, open a Github issue.")
            _upload_files(live, client, src_path, dst_path, pwd)
            return None

        # remote -> local: download (optionally as a zip archive).
        if src_remote and not dst_remote:
            if zip:
                return _zip_files(live, src_path, dst_path)
            _download_files(live, client, src_path, dst_path, pwd)
            return None

        # local -> local: unsupported.
        return _error_and_exit("Moving files locally isn't supported yet. Please, open a Github issue.")
155,665 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
def _main() -> None:
    # Intentionally empty: serves as a grouping entry point for subcommands
    # (presumably a click group — confirm decorators at the definition site).
    pass
155,666 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
The provided code snippet includes necessary dependencies for implementing the `show` function. Write a Python function `def show() -> None` to solve the following problem:
Show given resource.
Here is the function:
def show() -> None:
    """Show given resource."""
    # Intentionally empty: acts as a container for `show` subcommands
    # (presumably a click group — confirm decorators at the definition site).
    pass
155,667 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
The provided code snippet includes necessary dependencies for implementing the `connect` function. Write a Python function `def connect() -> None` to solve the following problem:
Connect apps and data.
Here is the function:
def connect() -> None:
    """Connect apps and data."""
    # Intentionally empty: acts as a container for `connect` subcommands
    # (presumably a click group — confirm decorators at the definition site).
    pass
155,668 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
The provided code snippet includes necessary dependencies for implementing the `disconnect` function. Write a Python function `def disconnect() -> None` to solve the following problem:
Disconnect apps.
Here is the function:
def disconnect() -> None:
    """Disconnect apps."""
    # Intentionally empty: acts as a container for `disconnect` subcommands
    # (presumably a click group — confirm decorators at the definition site).
    pass
155,669 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
def get_lightning_cloud_url() -> str:
    """Return the base URL of the Lightning cloud backend.

    Local development is detected via the ``VSCODE_PROXY_URI`` environment
    variable pointing at the local proxy; otherwise the URL comes from
    ``LIGHTNING_CLOUD_URL`` (defaulting to the production endpoint).
    """
    # detect local development
    if os.getenv("VSCODE_PROXY_URI", "").startswith("http://localhost:9800"):
        return "http://localhost:9800"
    # DO NOT CHANGE!
    return os.getenv("LIGHTNING_CLOUD_URL", "https://lightning.ai")
class Auth:
    """Manage lightning.ai credentials: load/save on disk, env vars, or browser login.

    NOTE(review): ``Keys``, ``AuthServer``, ``logger`` and
    ``LIGHTNING_CREDENTIAL_PATH`` are defined elsewhere in the package.
    """

    username: Optional[str] = None
    user_id: Optional[str] = None
    api_key: Optional[str] = None

    # On-disk location of the JSON credentials file.
    secrets_file = pathlib.Path(LIGHTNING_CREDENTIAL_PATH)

    def load(self) -> bool:
        """Load credentials from disk and update properties with credentials.

        Returns
        ----------
        True if credentials are available.
        """
        if not self.secrets_file.exists():
            logger.debug("Credentials file not found.")
            return False
        with self.secrets_file.open() as creds:
            credentials = json.load(creds)
            for key in Keys:
                setattr(self, key.suffix, credentials.get(key.suffix, None))
        return True

    def save(self, token: str = "", user_id: str = "", api_key: str = "", username: str = "") -> None:
        """Save credentials to disk."""
        self.secrets_file.parent.mkdir(exist_ok=True, parents=True)
        with self.secrets_file.open("w") as f:
            json.dump(
                {
                    f"{Keys.USERNAME.suffix}": username,
                    f"{Keys.USER_ID.suffix}": user_id,
                    f"{Keys.API_KEY.suffix}": api_key,
                },
                f,
            )
        self.username = username
        self.user_id = user_id
        self.api_key = api_key
        logger.debug("credentials saved successfully")

    def clear(self) -> None:
        """Remove credentials from disk."""
        if self.secrets_file.exists():
            self.secrets_file.unlink()
        for key in Keys:
            setattr(self, key.suffix, None)
        logger.debug("credentials removed successfully")

    # Restored @property: the extraction dropped the decorator —
    # ``authenticate`` reads ``self.auth_header`` without calling it.
    @property
    def auth_header(self) -> Optional[str]:
        """Authentication header used by lightning-cloud client."""
        if self.api_key:
            token = f"{self.user_id}:{self.api_key}"
            return f"Basic {base64.b64encode(token.encode('ascii')).decode('ascii')}"  # E501
        raise AttributeError(
            "Authentication Failed, no authentication header available. "
            "This is most likely a bug in the LightningCloud Framework"
        )

    def _run_server(self) -> None:
        """Start a server to complete authentication."""
        AuthServer().login_with_browser(self)

    def authenticate(self) -> Optional[str]:
        """Performs end to end authentication flow.

        Returns
        ----------
        authorization header to use when authentication completes.
        """
        if not self.load():
            # First try to authenticate from env
            for key in Keys:
                setattr(self, key.suffix, os.environ.get(key.value, None))
            if self.user_id and self.api_key:
                self.save("", self.user_id, self.api_key, self.user_id)
                logger.info("Credentials loaded from environment variables")
                return self.auth_header
            if self.api_key or self.user_id:
                raise ValueError(
                    "To use env vars for authentication both "
                    f"{Keys.USER_ID.value} and {Keys.API_KEY.value} should be set."
                )
            logger.debug("failed to load credentials, opening browser to get new.")
            self._run_server()
            return self.auth_header
        if self.user_id and self.api_key:
            return self.auth_header
        raise ValueError(
            "We couldn't find any credentials linked to your account. "
            "Please try logging in using the CLI command `lightning_app login`"
        )
The provided code snippet includes necessary dependencies for implementing the `login` function. Write a Python function `def login() -> None` to solve the following problem:
Log in to your lightning.ai account.
Here is the function:
def login() -> None:
    """Log in to your lightning.ai account.

    Clears any stale credentials first, then runs the full authentication
    flow; on a network failure the user is told to check connectivity and
    the process exits with status 1.
    """
    auth = Auth()
    auth.clear()

    try:
        auth.authenticate()
    except ConnectionError:
        click.echo(f"Unable to connect to {get_lightning_cloud_url()}. Please check your internet connection.")
        exit(1)
155,670 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
def disconnect_app(logout: bool = False):
    """Disconnect from an App.

    Removes the ``connect.txt`` marker and the cached ``commands`` folder,
    then reports which app was disconnected. When called as part of logout
    (``logout=True``) the "not connected" hint is suppressed.
    """
    _clean_lightning_connection()

    connected_file = os.path.join(_LIGHTNING_CONNECTION_FOLDER, "connect.txt")
    if os.path.exists(connected_file):
        # First line of the marker file names the connected app (or "localhost").
        with open(connected_file) as f:
            result = f.readlines()[0].replace("\n", "")

        os.remove(connected_file)

        commands_folder = os.path.join(_LIGHTNING_CONNECTION_FOLDER, "commands")
        if os.path.exists(commands_folder):
            shutil.rmtree(commands_folder)

        if result == "localhost":
            click.echo("You are disconnected from the local Lightning App.")
        else:
            click.echo(f"You are disconnected from the cloud Lightning App: {result}.")
    else:
        if not logout:
            click.echo(
                "You aren't connected to any Lightning App. "
                "Please use `lightning_app connect app_name_or_id` to connect to one."
            )
class Auth:
    """Manage lightning.ai credentials: load/save on disk, env vars, or browser login.

    NOTE(review): ``Keys``, ``AuthServer``, ``logger`` and
    ``LIGHTNING_CREDENTIAL_PATH`` are defined elsewhere in the package.
    """

    username: Optional[str] = None
    user_id: Optional[str] = None
    api_key: Optional[str] = None

    # On-disk location of the JSON credentials file.
    secrets_file = pathlib.Path(LIGHTNING_CREDENTIAL_PATH)

    def load(self) -> bool:
        """Load credentials from disk and update properties with credentials.

        Returns
        ----------
        True if credentials are available.
        """
        if not self.secrets_file.exists():
            logger.debug("Credentials file not found.")
            return False
        with self.secrets_file.open() as creds:
            credentials = json.load(creds)
            for key in Keys:
                setattr(self, key.suffix, credentials.get(key.suffix, None))
        return True

    def save(self, token: str = "", user_id: str = "", api_key: str = "", username: str = "") -> None:
        """Save credentials to disk."""
        self.secrets_file.parent.mkdir(exist_ok=True, parents=True)
        with self.secrets_file.open("w") as f:
            json.dump(
                {
                    f"{Keys.USERNAME.suffix}": username,
                    f"{Keys.USER_ID.suffix}": user_id,
                    f"{Keys.API_KEY.suffix}": api_key,
                },
                f,
            )
        self.username = username
        self.user_id = user_id
        self.api_key = api_key
        logger.debug("credentials saved successfully")

    def clear(self) -> None:
        """Remove credentials from disk."""
        if self.secrets_file.exists():
            self.secrets_file.unlink()
        for key in Keys:
            setattr(self, key.suffix, None)
        logger.debug("credentials removed successfully")

    # Restored @property: the extraction dropped the decorator —
    # ``authenticate`` reads ``self.auth_header`` without calling it.
    @property
    def auth_header(self) -> Optional[str]:
        """Authentication header used by lightning-cloud client."""
        if self.api_key:
            token = f"{self.user_id}:{self.api_key}"
            return f"Basic {base64.b64encode(token.encode('ascii')).decode('ascii')}"  # E501
        raise AttributeError(
            "Authentication Failed, no authentication header available. "
            "This is most likely a bug in the LightningCloud Framework"
        )

    def _run_server(self) -> None:
        """Start a server to complete authentication."""
        AuthServer().login_with_browser(self)

    def authenticate(self) -> Optional[str]:
        """Performs end to end authentication flow.

        Returns
        ----------
        authorization header to use when authentication completes.
        """
        if not self.load():
            # First try to authenticate from env
            for key in Keys:
                setattr(self, key.suffix, os.environ.get(key.value, None))
            if self.user_id and self.api_key:
                self.save("", self.user_id, self.api_key, self.user_id)
                logger.info("Credentials loaded from environment variables")
                return self.auth_header
            if self.api_key or self.user_id:
                raise ValueError(
                    "To use env vars for authentication both "
                    f"{Keys.USER_ID.value} and {Keys.API_KEY.value} should be set."
                )
            logger.debug("failed to load credentials, opening browser to get new.")
            self._run_server()
            return self.auth_header
        if self.user_id and self.api_key:
            return self.auth_header
        raise ValueError(
            "We couldn't find any credentials linked to your account. "
            "Please try logging in using the CLI command `lightning_app login`"
        )
The provided code snippet includes necessary dependencies for implementing the `logout` function. Write a Python function `def logout() -> None` to solve the following problem:
Log out of your lightning.ai account.
Here is the function:
def logout() -> None:
    """Log out of your lightning.ai account.

    Clears stored credentials and also drops any app connection;
    ``logout=True`` suppresses the "not connected" message.
    """
    Auth().clear()
    disconnect_app(logout=True)
155,671 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
The provided code snippet includes necessary dependencies for implementing the `run` function. Write a Python function `def run() -> None` to solve the following problem:
Run a Lightning application locally or on the cloud.
Here is the function:
def run() -> None:
    """Run a Lightning application locally or on the cloud.

    NOTE(review): docstring-only body — presumably a Click group whose
    subcommands carry the behavior; confirm the decorators in the caller.
    """
155,672 | import os
import sys
from pathlib import Path
from typing import Tuple, Union
import click
from requests.exceptions import ConnectionError
import lightning.app.core.constants as constants
from lightning.app import __version__ as ver
from lightning.app.cli import cmd_init, cmd_install, cmd_pl_init, cmd_react_ui_init
from lightning.app.cli.commands.app_commands import _run_app_command
from lightning.app.cli.commands.cd import cd
from lightning.app.cli.commands.cp import cp
from lightning.app.cli.commands.logs import logs
from lightning.app.cli.commands.ls import ls
from lightning.app.cli.commands.pwd import pwd
from lightning.app.cli.commands.rm import rm
from lightning.app.cli.connect.app import (
_list_app_commands,
_retrieve_connection_to_an_app,
connect_app,
disconnect_app,
)
from lightning.app.cli.connect.data import connect_data
from lightning.app.cli.lightning_cli_delete import delete
from lightning.app.cli.lightning_cli_launch import launch
from lightning.app.cli.lightning_cli_list import get_list
from lightning.app.core.constants import ENABLE_APP_COMMENT_COMMAND_EXECUTION, get_lightning_cloud_url
from lightning.app.runners.cloud import CloudRuntime
from lightning.app.runners.runtime import dispatch
from lightning.app.runners.runtime_type import RuntimeType
from lightning.app.utilities.app_commands import run_app_commands
from lightning.app.utilities.app_helpers import Logger
from lightning.app.utilities.cli_helpers import (
_check_environment_and_redirect,
_check_version_and_upgrade,
_format_input_env_variables,
)
from lightning.app.utilities.exceptions import _ApiExceptionHandler
from lightning.app.utilities.login import Auth
from lightning.app.utilities.port import _find_lit_app_port
The provided code snippet includes necessary dependencies for implementing the `init` function. Write a Python function `def init() -> None` to solve the following problem:
Init a Lightning App and/or component.
Here is the function:
def init() -> None:
    """Init a Lightning App and/or component.

    NOTE(review): docstring-only body — presumably a Click group whose
    subcommands carry the behavior; confirm the decorators in the caller.
    """
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.