Oliveira-juliano commited on
Commit
75a31c0
·
1 Parent(s): fc86948

feat: first commit

Browse files
.codespellignore ADDED
File without changes
.gitignore ADDED
@@ -0,0 +1,164 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # Byte-compiled / optimized / DLL files
2
+ __pycache__/
3
+ *.py[cod]
4
+ *$py.class
5
+
6
+ # C extensions
7
+ *.so
8
+
9
+ # Distribution / packaging
10
+ .Python
11
+ build/
12
+ develop-eggs/
13
+ dist/
14
+ downloads/
15
+ eggs/
16
+ .eggs/
17
+ lib/
18
+ lib64/
19
+ parts/
20
+ sdist/
21
+ var/
22
+ wheels/
23
+ share/python-wheels/
24
+ *.egg-info/
25
+ .installed.cfg
26
+ *.egg
27
+ MANIFEST
28
+
29
+ # PyInstaller
30
+ # Usually these files are written by a python script from a template
31
+ # before PyInstaller builds the exe, so as to inject date/other infos into it.
32
+ *.manifest
33
+ *.spec
34
+
35
+ # Installer logs
36
+ pip-log.txt
37
+ pip-delete-this-directory.txt
38
+
39
+ # Unit test / coverage reports
40
+ htmlcov/
41
+ .tox/
42
+ .nox/
43
+ .coverage
44
+ .coverage.*
45
+ .cache
46
+ nosetests.xml
47
+ coverage.xml
48
+ *.cover
49
+ *.py,cover
50
+ .hypothesis/
51
+ .pytest_cache/
52
+ cover/
53
+
54
+ # Translations
55
+ *.mo
56
+ *.pot
57
+
58
+ # Django stuff:
59
+ *.log
60
+ local_settings.py
61
+ db.sqlite3
62
+ db.sqlite3-journal
63
+
64
+ # Flask stuff:
65
+ instance/
66
+ .webassets-cache
67
+
68
+ # Scrapy stuff:
69
+ .scrapy
70
+
71
+ # Sphinx documentation
72
+ docs/_build/
73
+
74
+ # PyBuilder
75
+ .pybuilder/
76
+ target/
77
+
78
+ # Jupyter Notebook
79
+ .ipynb_checkpoints
80
+
81
+ # IPython
82
+ profile_default/
83
+ ipython_config.py
84
+
85
+ # pyenv
86
+ # For a library or package, you might want to ignore these files since the code is
87
+ # intended to run in multiple environments; otherwise, check them in:
88
+ # .python-version
89
+
90
+ # pipenv
91
+ # According to pypa/pipenv#598, it is recommended to include Pipfile.lock in version control.
92
+ # However, in case of collaboration, if having platform-specific dependencies or dependencies
93
+ # having no cross-platform support, pipenv may install dependencies that don't work, or not
94
+ # install all needed dependencies.
95
+ #Pipfile.lock
96
+
97
+ # poetry
98
+ # Similar to Pipfile.lock, it is generally recommended to include poetry.lock in version control.
99
+ # This is especially recommended for binary packages to ensure reproducibility, and is more
100
+ # commonly ignored for libraries.
101
+ # https://python-poetry.org/docs/basic-usage/#commit-your-poetrylock-file-to-version-control
102
+ #poetry.lock
103
+
104
+ # pdm
105
+ # Similar to Pipfile.lock, it is generally recommended to include pdm.lock in version control.
106
+ #pdm.lock
107
+ # pdm stores project-wide configurations in .pdm.toml, but it is recommended to not include it
108
+ # in version control.
109
+ # https://pdm.fming.dev/latest/usage/project/#working-with-version-control
110
+ .pdm.toml
111
+ .pdm-python
112
+ .pdm-build/
113
+
114
+ # PEP 582; used by e.g. github.com/David-OConnor/pyflow and github.com/pdm-project/pdm
115
+ __pypackages__/
116
+
117
+ # Celery stuff
118
+ celerybeat-schedule
119
+ celerybeat.pid
120
+
121
+ # SageMath parsed files
122
+ *.sage.py
123
+
124
+ # Environments
125
+ .env
126
+ .venv
127
+ env/
128
+ venv/
129
+ ENV/
130
+ env.bak/
131
+ venv.bak/
132
+
133
+ # Spyder project settings
134
+ .spyderproject
135
+ .spyproject
136
+
137
+ # Rope project settings
138
+ .ropeproject
139
+
140
+ # mkdocs documentation
141
+ /site
142
+
143
+ # mypy
144
+ .mypy_cache/
145
+ .dmypy.json
146
+ dmypy.json
147
+
148
+ # Pyre type checker
149
+ .pyre/
150
+
151
+ # pytype static type analyzer
152
+ .pytype/
153
+
154
+ # Cython debug symbols
155
+ cython_debug/
156
+
157
+ # PyCharm
158
+ # JetBrains specific template is maintained in a separate JetBrains.gitignore that can
159
+ # be found at https://github.com/github/gitignore/blob/main/Global/JetBrains.gitignore
160
+ # and can be added to the global gitignore or merged into this file. For a more nuclear
161
+ # option (not recommended) you can uncomment the following to ignore the entire idea folder.
162
+ #.idea/
163
+ uv.lock
164
+ .langgraph_api/
LICENSE ADDED
@@ -0,0 +1,21 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ MIT License
2
+
3
+ Copyright (c) 2024 LangChain
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
Makefile ADDED
@@ -0,0 +1,67 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ .PHONY: all format lint test tests test_watch integration_tests docker_tests help extended_tests
2
+
3
+ # Default target executed when no arguments are given to make.
4
+ all: help
5
+
6
+ # Define a variable for the test file path.
7
+ TEST_FILE ?= tests/unit_tests/
8
+
9
+ test:
10
+ python -m pytest $(TEST_FILE)
11
+
12
+ integration_tests:
13
+ python -m pytest tests/integration_tests
14
+
15
+ test_watch:
16
+ python -m ptw --snapshot-update --now . -- -vv tests/unit_tests
17
+
18
+ test_profile:
19
+ python -m pytest -vv tests/unit_tests/ --profile-svg
20
+
21
+ extended_tests:
22
+ python -m pytest --only-extended $(TEST_FILE)
23
+
24
+
25
+ ######################
26
+ # LINTING AND FORMATTING
27
+ ######################
28
+
29
+ # Define a variable for Python and notebook files.
30
+ PYTHON_FILES=src/
31
+ MYPY_CACHE=.mypy_cache
32
+ lint format: PYTHON_FILES=.
33
+ lint_diff format_diff: PYTHON_FILES=$(shell git diff --name-only --diff-filter=d main | grep -E '\.py$$|\.ipynb$$')
34
+ lint_package: PYTHON_FILES=src
35
+ lint_tests: PYTHON_FILES=tests
36
+ lint_tests: MYPY_CACHE=.mypy_cache_test
37
+
38
+ lint lint_diff lint_package lint_tests:
39
+ python -m ruff check .
40
+ [ "$(PYTHON_FILES)" = "" ] || python -m ruff format $(PYTHON_FILES) --diff
41
+ [ "$(PYTHON_FILES)" = "" ] || python -m ruff check --select I $(PYTHON_FILES)
42
+ [ "$(PYTHON_FILES)" = "" ] || python -m mypy --strict $(PYTHON_FILES)
43
+ [ "$(PYTHON_FILES)" = "" ] || mkdir -p $(MYPY_CACHE) && python -m mypy --strict $(PYTHON_FILES) --cache-dir $(MYPY_CACHE)
44
+
45
+ format format_diff:
46
+ ruff format $(PYTHON_FILES)
47
+ ruff check --select I --fix $(PYTHON_FILES)
48
+
49
+ spell_check:
50
+ codespell --toml pyproject.toml
51
+
52
+ spell_fix:
53
+ codespell --toml pyproject.toml -w
54
+
55
+ ######################
56
+ # HELP
57
+ ######################
58
+
59
+ help:
60
+ @echo '----'
61
+ @echo 'format - run code formatters'
62
+ @echo 'lint - run linters'
63
+ @echo 'test - run unit tests'
64
+ @echo 'tests - run unit tests'
65
+ @echo 'test TEST_FILE=<test_file> - run all tests in file'
66
+ @echo 'test_watch - run unit tests in watch mode'
67
+
README.md CHANGED
@@ -1,13 +1,80 @@
1
- ---
2
- title: ImobIA
3
- emoji: 👁
4
- colorFrom: gray
5
- colorTo: pink
6
- sdk: gradio
7
- sdk_version: 5.40.0
8
- app_file: app.py
9
- pinned: false
10
- short_description: primeira versão imobIA
11
- ---
12
-
13
- Check out the configuration reference at https://huggingface.co/docs/hub/spaces-config-reference
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ # New LangGraph Project
2
+
3
+ [![CI](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/unit-tests.yml/badge.svg)](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/unit-tests.yml)
4
+ [![Integration Tests](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/integration-tests.yml/badge.svg)](https://github.com/langchain-ai/new-langgraph-project/actions/workflows/integration-tests.yml)
5
+
6
+ This template demonstrates a simple application implemented using [LangGraph](https://github.com/langchain-ai/langgraph), designed for showing how to get started with [LangGraph Server](https://langchain-ai.github.io/langgraph/concepts/langgraph_server/#langgraph-server) and using [LangGraph Studio](https://langchain-ai.github.io/langgraph/concepts/langgraph_studio/), a visual debugging IDE.
7
+
8
+ <div align="center">
9
+ <img src="./static/studio_ui.png" alt="Graph view in LangGraph studio UI" width="75%" />
10
+ </div>
11
+
12
+ The core logic defined in `src/agent/graph.py`, showcases an single-step application that responds with a fixed string and the configuration provided.
13
+
14
+ You can extend this graph to orchestrate more complex agentic workflows that can be visualized and debugged in LangGraph Studio.
15
+
16
+ ## Getting Started
17
+
18
+ <!--
19
+ Setup instruction auto-generated by `langgraph template lock`. DO NOT EDIT MANUALLY.
20
+ -->
21
+
22
+ <!--
23
+ End setup instructions
24
+ -->
25
+
26
+ 1. Install dependencies, along with the [LangGraph CLI](https://langchain-ai.github.io/langgraph/concepts/langgraph_cli/), which will be used to run the server.
27
+
28
+ ```bash
29
+ cd path/to/your/app
30
+ pip install -e . "langgraph-cli[inmem]"
31
+ ```
32
+
33
+ 2. (Optional) Customize the code and project as needed. Create a `.env` file if you need to use secrets.
34
+
35
+ ```bash
36
+ cp .env.example .env
37
+ ```
38
+
39
+ If you want to enable LangSmith tracing, add your LangSmith API key to the `.env` file.
40
+
41
+ ```text
42
+ # .env
43
+ LANGSMITH_API_KEY=lsv2...
44
+ ```
45
+
46
+ 3. Start the LangGraph Server.
47
+
48
+ ```shell
49
+ langgraph dev
50
+ ```
51
+
52
+ For more information on getting started with LangGraph Server, [see here](https://langchain-ai.github.io/langgraph/tutorials/langgraph-platform/local-server/).
53
+
54
+ ## How to customize
55
+
56
+ 1. **Define configurable parameters**: Modify the `Configuration` class in the `graph.py` file to expose the arguments you want to configure. For example, in a chatbot application you may want to define a dynamic system prompt or LLM to use. For more information on configurations in LangGraph, [see here](https://langchain-ai.github.io/langgraph/concepts/low_level/?h=configuration#configuration).
57
+
58
+ 2. **Extend the graph**: The core logic of the application is defined in [graph.py](./src/agent/graph.py). You can modify this file to add new nodes, edges, or change the flow of information.
59
+
60
+ ## Development
61
+
62
+ While iterating on your graph in LangGraph Studio, you can edit past state and rerun your app from previous states to debug specific nodes. Local changes will be automatically applied via hot reload.
63
+
64
+ Follow-up requests extend the same thread. You can create an entirely new thread, clearing previous history, using the `+` button in the top right.
65
+
66
+ For more advanced features and examples, refer to the [LangGraph documentation](https://langchain-ai.github.io/langgraph/). These resources can help you adapt this template for your specific use case and build more sophisticated conversational agents.
67
+
68
+ LangGraph Studio also integrates with [LangSmith](https://smith.langchain.com/) for more in-depth tracing and collaboration with teammates, allowing you to analyze and optimize your chatbot's performance.
69
+
70
+ <!--
71
+ Configuration auto-generated by `langgraph template lock`. DO NOT EDIT MANUALLY.
72
+ {
73
+ "config_schemas": {
74
+ "agent": {
75
+ "type": "object",
76
+ "properties": {}
77
+ }
78
+ }
79
+ }
80
+ -->
pyproject.toml ADDED
@@ -0,0 +1,65 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ [project]
2
+ name = "agent"
3
+ version = "0.0.1"
4
+ description = "Starter template for making a new LangGraph agent."
5
+ authors = [
6
+ { name = "William Fu-Hinthorn", email = "13333726+hinthornw@users.noreply.github.com" },
7
+ ]
8
+ readme = "README.md"
9
+ license = { text = "MIT" }
10
+ requires-python = ">=3.9"
11
+ dependencies = [
12
+ "langgraph>=0.2.6",
13
+ "python-dotenv>=1.0.1",
14
+ ]
15
+
16
+
17
+ [project.optional-dependencies]
18
+ dev = ["mypy>=1.11.1", "ruff>=0.6.1"]
19
+
20
+ [build-system]
21
+ requires = ["setuptools>=73.0.0", "wheel"]
22
+ build-backend = "setuptools.build_meta"
23
+
24
+ [tool.setuptools]
25
+ packages = ["langgraph.templates.agent", "agent"]
26
+ [tool.setuptools.package-dir]
27
+ "langgraph.templates.agent" = "src/agent"
28
+ "agent" = "src/agent"
29
+
30
+
31
+ [tool.setuptools.package-data]
32
+ "*" = ["py.typed"]
33
+
34
+ [tool.ruff]
35
+ lint.select = [
36
+ "E", # pycodestyle
37
+ "F", # pyflakes
38
+ "I", # isort
39
+ "D", # pydocstyle
40
+ "D401", # First line should be in imperative mood
41
+ "T201",
42
+ "UP",
43
+ ]
44
+ lint.ignore = [
45
+ "UP006",
46
+ "UP007",
47
+ # We actually do want to import from typing_extensions
48
+ "UP035",
49
+ # Relax the convention by _not_ requiring documentation for every function parameter.
50
+ "D417",
51
+ "E501",
52
+ ]
53
+ [tool.ruff.lint.per-file-ignores]
54
+ "tests/*" = ["D", "UP"]
55
+ [tool.ruff.lint.pydocstyle]
56
+ convention = "google"
57
+
58
+ [dependency-groups]
59
+ dev = [
60
+ "anyio>=4.7.0",
61
+ "langgraph-cli[inmem]>=0.2.8",
62
+ "mypy>=1.13.0",
63
+ "pytest>=8.3.5",
64
+ "ruff>=0.8.2",
65
+ ]
src/agent/__init__.py ADDED
@@ -0,0 +1,8 @@
 
 
 
 
 
 
 
 
 
1
+ """New LangGraph Agent.
2
+
3
+ This module defines a custom graph.
4
+ """
5
+
6
+ from agent.graph import graph
7
+
8
+ __all__ = ["graph"]
src/agent/agent_main.py ADDED
@@ -0,0 +1,81 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
+ from langchain.chat_models import ChatOpenAI
2
+ from langchain.agents import AgentExecutor, tool
3
+ from langchain.agents import initialize_agent
4
+ from langchain.tools import Tool
5
+ from langchain.chains import RetrievalQA
6
+ from langchain.vectorstores import FAISS
7
+ from langchain.embeddings import OpenAIEmbeddings
8
+ from dotenv import load_dotenv
9
+ import os
10
+ import boto3
11
+
12
+
13
load_dotenv()  # Load variables from the local .env file into the environment.

# Credentials read from the environment; each is None if the key is absent.
# NOTE(review): the AWS variable names are lowercase here — confirm the .env
# file uses exactly "aws_access_key_id" / "aws_secret_access_key".
openai_api_key = os.getenv("OPENAI_API_KEY")
aws_access_key_id = os.getenv("aws_access_key_id")
aws_secret_access_key = os.getenv("aws_secret_access_key")
18
+
19
+
20
def baixar_faiss_do_s3(bucket_name, local_dir, s3_prefix=""):
    """Download the FAISS index files (index.faiss / index.pkl) from S3.

    Args:
        bucket_name: Name of the S3 bucket holding the index files.
        local_dir: Local directory to download into (created if missing).
        s3_prefix: Optional key prefix under which the files are stored.
    """
    s3_resource = boto3.resource(
        "s3",
        aws_access_key_id=aws_access_key_id,
        aws_secret_access_key=aws_secret_access_key,
    )
    bucket = s3_resource.Bucket(bucket_name)
    os.makedirs(local_dir, exist_ok=True)
    for fname in ["index.faiss", "index.pkl"]:
        # Prefix keys only when a prefix was actually supplied.
        remote_key = f"{s3_prefix}/{fname}" if s3_prefix else fname
        bucket.download_file(remote_key, os.path.join(local_dir, fname))
31
+
32
# Local scratch directory for the downloaded FAISS index.
VECTOR_DIR = "temp_faiss"

# Fetch the index files from the S3 bucket (happens at import time).
baixar_faiss_do_s3("imobia-app", VECTOR_DIR)

# Load the FAISS vector store from the local copy.
# NOTE(review): allow_dangerous_deserialization=True unpickles index.pkl —
# acceptable only because the bucket contents are produced by this project;
# never point this at an untrusted bucket.
embedding = OpenAIEmbeddings()
vectorstore = FAISS.load_local(VECTOR_DIR, embedding, allow_dangerous_deserialization=True)
41
+
42
+ # RAG tool
43
@tool
def consultar_empreendimentos(pergunta: str) -> str:
    """Consulta a base de dados de empreendimentos imobiliários para responder perguntas sobre imóveis, preços, localização, metragem, diferenciais ou condições."""
    # NOTE: the docstring above doubles as the tool description sent to the
    # LLM by @tool, so it is intentionally kept verbatim (in Portuguese).
    modelo = ChatOpenAI(model="gpt-4o-mini", temperature=0)
    retriever = vectorstore.as_retriever()
    chain = RetrievalQA.from_chain_type(llm=modelo, retriever=retriever)
    return chain.run(pergunta)
51
+
52
# Tools exposed to the agent; currently only the RAG lookup.
tools = [consultar_empreendimentos]

# Chat model with function-calling support, used by the agent itself.
llm = ChatOpenAI(model="gpt-4o-mini", temperature=0)

# System prompt establishing the real-estate broker persona (Portuguese,
# user-facing). Fixed typos: "imobiliarios" -> "imobiliários",
# "codições" -> "condições".
system_msg = """
Você é um corretor de imóveis especializado em empreendimentos imobiliários. Seja cordial e educado, sua função é
convencer o cliente a conhecer os empreendimentos e preparar a venda para o corretor.

Sua função é ajudar o cliente com informações sobre localização, valores, metragem, diferenciais e condições de pagamento.

Se a pergunta for sobre esses empreendimentos, use a ferramenta de consulta para responder com precisão.

Nunca fale sobre assuntos fora do mercado imobiliário. Seja sempre educado, profissional e objetivo.

No final de seu discurso sempre tente oferecer ajuda para entender melhor sobre os empreendimentos, condições de pagamento,
seja útil e claro.
"""

# Function-calling agent wired with the tools and the persona prompt.
agent = initialize_agent(
    tools=tools,
    llm=llm,
    agent_type="openai-functions",
    verbose=True,
    agent_kwargs={"system_message": system_msg},
)
79
+
80
def responder_pergunta(pergunta):
    """Forward a user question to the agent and return its textual answer."""
    resposta = agent.run(pergunta)
    return resposta
src/agent/app.py ADDED
@@ -0,0 +1,16 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import gradio as gr
from agent_main import responder_pergunta

# Minimal Gradio chat front-end wired to the real-estate agent.
with gr.Blocks() as demo:
    gr.Markdown("# 🏡 imobIA")
    chatbot = gr.Chatbot()
    msg = gr.Textbox(label="Como posso te ajudar hoje?")

    def responder(chat_history, user_input):
        """Append the agent's answer for user_input to the chat history.

        Returns the updated history plus an empty string to clear the textbox.
        """
        resposta = responder_pergunta(user_input)
        chat_history.append((user_input, resposta))
        return chat_history, ""

    # Submitting the textbox updates the chat and clears the input field.
    msg.submit(responder, [chatbot, msg], [chatbot, msg])

demo.launch()
tests/conftest.py ADDED
@@ -0,0 +1,6 @@
 
 
 
 
 
 
 
1
import pytest


@pytest.fixture(scope="session")
def anyio_backend() -> str:
    """Select the asyncio backend for anyio-based async tests (session-wide)."""
    return "asyncio"
tests/integration_tests/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ """Define any integration tests you want in this directory."""
tests/integration_tests/test_graph.py ADDED
@@ -0,0 +1,12 @@
 
 
 
 
 
 
 
 
 
 
 
 
 
1
import pytest

from agent import graph

# Run every coroutine test in this module through anyio (asyncio backend,
# selected by the session fixture in tests/conftest.py).
pytestmark = pytest.mark.anyio


@pytest.mark.langsmith
async def test_agent_simple_passthrough() -> None:
    """Smoke test: the graph accepts an arbitrary input dict and yields a result."""
    inputs = {"changeme": "some_val"}
    res = await graph.ainvoke(inputs)
    assert res is not None
tests/unit_tests/__init__.py ADDED
@@ -0,0 +1 @@
 
 
1
+ """Define any unit tests you may want in this directory."""
tests/unit_tests/test_configuration.py ADDED
@@ -0,0 +1,9 @@
 
 
 
 
 
 
 
 
 
 
1
from langgraph.pregel import Pregel

from agent.graph import graph


def test_placeholder() -> None:
    """Sanity check: the compiled graph is a Pregel (LangGraph runtime) object."""
    # TODO: You can add actual unit tests
    # for your graph and other logic here.
    assert isinstance(graph, Pregel)
+ assert isinstance(graph, Pregel)