index
int64 0
0
| repo_id
stringclasses 596
values | file_path
stringlengths 31
168
| content
stringlengths 1
6.2M
|
|---|---|---|---|
0
|
lc_public_repos
|
lc_public_repos/langchain-extract/docker-compose.yml
|
name: langchain-extract
services:
  postgres:
    # Careful if bumping postgres version.
    # Make sure to keep in sync with CI
    # version if being tested on CI.
    image: postgres:16
    expose:
      - "5432"
    ports:
      - "5432:5432"
    environment:
      POSTGRES_DB: langchain
      POSTGRES_USER: langchain
      POSTGRES_PASSWORD: langchain
    healthcheck:
      # pg_isready never authenticates and has no -W/password option;
      # the previous "-W langchain" argument made the probe invalid.
      test: ["CMD-SHELL", "pg_isready -U langchain -d langchain"]
      interval: 10s
      timeout: 5s
      retries: 5
    volumes:
      - postgres_data:/var/lib/postgresql/data
  backend:
    build:
      context: .
      dockerfile: ./backend/Dockerfile
      target: development
    env_file:
      - .local.env
    environment:
      - PG_HOST=postgres
      # Define CORS origins for dev work on UI
      - CORS_ORIGINS=http://localhost:3000
    ports:
      - "8000:8000" # Backend is accessible on localhost:8000
    depends_on:
      # Gate on the healthcheck above (not mere container start) so the
      # backend does not race the database during boot.
      postgres:
        condition: service_healthy
    volumes:
      - ./backend:/backend
  frontend:
    build:
      context: ./frontend
      dockerfile: ./Dockerfile
      target: development
    ports:
      - "3000:3000"
    environment:
      - NODE_ENV=development
    volumes:
      - ./frontend:/app
      # Anonymous volume so the container's node_modules is not shadowed
      # by the host bind mount above.
      - /app/node_modules
    depends_on:
      - backend
volumes:
  postgres_data:
|
0
|
lc_public_repos
|
lc_public_repos/langchain-extract/Dockerfile
|
# All directory paths for COPY commands are relative to the build context
# Ensure this python version stays in sync with CI
FROM python:3.11-slim AS base
WORKDIR /backend
# set environment variables (key=value form; the space-separated ENV
# syntax is legacy/deprecated)
ENV PYTHONDONTWRITEBYTECODE=1
ENV PYTHONUNBUFFERED=1
ENV POETRY_HOME="/opt/poetry"
ENV MYPYPATH="/app/src/stubs"
# Use bash as the shell for the build
# https://github.com/docker/for-linux/issues/408#issuecomment-414748815
SHELL ["/bin/bash", "-o", "pipefail", "-c"]
# --no-install-recommends keeps the image lean; dropping the apt lists in
# the same layer keeps the package index out of the final image.
RUN set -eux && \
    apt-get update && \
    apt-get install -y --no-install-recommends \
        build-essential \
        curl \
        libpq-dev \
        python3-dev \
        libmagic1 && \
    rm -rf /var/lib/apt/lists/*
# https://python-poetry.org/docs
RUN pip install --no-cache-dir poetry
# install deps before copying project files so the cache is only invalidated
# when the deps change
COPY ./backend/pyproject.toml ./backend/poetry.lock ./
RUN poetry config virtualenvs.create false
RUN poetry install --no-root --only main
COPY ./backend .
# The server listens on 8000 (see docker-compose.yml and README's
# curl examples); the previous EXPOSE 8080 was stale. EXPOSE is
# documentation-only, so this changes no runtime behavior.
EXPOSE 8000
###
# development image
###
FROM base AS development
# NOTE(review): the development target runs prod_entry_point.sh — looks
# intentional (single entry script), but confirm a dev script isn't meant here.
ENTRYPOINT ["bash", "./scripts/prod_entry_point.sh"]
|
0
|
lc_public_repos
|
lc_public_repos/langchain-extract/LICENSE
|
MIT License
Copyright (c) 2024-Present Langchain AI
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
|
0
|
lc_public_repos
|
lc_public_repos/langchain-extract/README.md
|
🚧 Under Active Development 🚧
This repo is under active development. Do not use code from `main`. Instead, please check out code from [releases](https://github.com/langchain-ai/langchain-extract/releases).
This repository is not a library, but a jumping point for your own application -- so do not be surprised to find breaking changes between releases!
Check out the demo service deployed at [extract.langchain.com](https://extract.langchain.com/).
# 🦜⛏️ LangChain Extract
https://github.com/langchain-ai/langchain-extract/assets/26529506/6657280e-d05f-4c0f-9c47-07a0ef7c559d
[](https://github.com/langchain-ai/langchain-extract/actions/workflows/ci.yml)
[](https://opensource.org/licenses/MIT)
[](https://twitter.com/langchainai)
[](https://discord.gg/6adMQxSpJS)
[](https://github.com/langchain-ai/langchain-extract/issues)
`langchain-extract` is a simple web server that allows you to extract information from text and files using LLMs. It is built using [FastAPI](https://fastapi.tiangolo.com/), [LangChain](https://python.langchain.com/) and [PostgreSQL](https://www.postgresql.org/).
The backend closely follows the [extraction use-case documentation](https://python.langchain.com/docs/use_cases/extraction) and provides
a reference implementation of an app that helps to do extraction over data using LLMs.
This repository is meant to be a starting point for building your own extraction application which
may have slightly different requirements or use cases.
## Functionality
- π FastAPI webserver with a REST API
- π OpenAPI Documentation
- π Use [JSON Schema](https://json-schema.org/) to define what to extract
- π Use examples to improve the quality of extracted results
- π¦ Create and save extractors and examples in a database
- π Extract information from text and/or binary files
- 🦜️🏓 [LangServe](https://github.com/langchain-ai/langserve) endpoint to integrate with LangChain `RemoteRunnable`
## Releases:
0.0.1: https://github.com/langchain-ai/langchain-extract/releases/tag/0.0.1
0.0.2: https://github.com/langchain-ai/langchain-extract/releases/tag/0.0.2
## π Documentation
See the example notebooks in the [documentation](https://github.com/langchain-ai/langchain-extract/tree/main/docs/source/notebooks)
to see how to create examples to improve extraction results, upload files (e.g., HTML, PDF) and more.
Documentation and server code are both under development!
## π― Example API
Below are two sample `curl` requests to demonstrate how to use the API.
These only provide minimal examples of how to use the API,
see the [documentation](https://github.com/langchain-ai/langchain-extract/tree/main/docs/source/notebooks) for more information
about the API and the [extraction use-case documentation](https://python.langchain.com/docs/use_cases/extraction) for more information about how to extract
information using LangChain.
First we generate a user ID for ourselves. **The application does not properly manage users or include legitimate authentication**. Access to extractors, few-shot examples, and other artifacts is controlled via this ID. Consider it secret.
```sh
USER_ID=$(uuidgen)
export USER_ID
```
### Create an extractor
```sh
curl -X 'POST' \
'http://localhost:8000/extractors' \
-H 'accept: application/json' \
-H 'Content-Type: application/json' \
-H "x-key: ${USER_ID}" \
-d '{
"name": "Personal Information",
"description": "Use to extract personal information",
"schema": {
"type": "object",
"title": "Person",
"required": [
"name",
"age"
],
"properties": {
"age": {
"type": "integer",
"title": "Age"
},
"name": {
"type": "string",
"title": "Name"
}
}
},
"instruction": "Use information about the person from the given user input."
}'
```
Response:
```json
{
"uuid": "e07f389f-3577-4e94-bd88-6b201d1b10b9"
}
```
Use the extract endpoint to extract information from the text (or a file)
using an existing pre-defined extractor.
```sh
curl -s -X 'POST' \
'http://localhost:8000/extract' \
-H 'accept: application/json' \
-H 'Content-Type: multipart/form-data' \
-H "x-key: ${USER_ID}" \
-F 'extractor_id=e07f389f-3577-4e94-bd88-6b201d1b10b9' \
-F 'text=my name is chester and i am 20 years old. My name is eugene and I am 1 year older than chester.' \
-F 'mode=entire_document' \
-F 'file=' | jq .
```
Response:
```json
{
"data": [
{
"name": "chester",
"age": 20
},
{
"name": "eugene",
"age": 21
}
]
}
```
Add a few shot example:
```sh
curl -X POST "http://localhost:8000/examples" \
-H "Content-Type: application/json" \
-H "x-key: ${USER_ID}" \
-d '{
"extractor_id": "e07f389f-3577-4e94-bd88-6b201d1b10b9",
"content": "marcos is 10.",
"output": [
{
"name": "MARCOS",
"age": 10
}
]
}' | jq .
```
The response will contain a UUID for the example. Examples can be deleted with a DELETE request. This example is now persisted and associated with our extractor, and subsequent extraction runs will incorporate it.
## Running locally
The easiest way to get started is to use `docker-compose` to run the server.
**Configure the environment**
Add `.local.env` file to the root directory with the following content:
```sh
OPENAI_API_KEY=... # Your OpenAI API key
```
Adding `FIREWORKS_API_KEY` or `TOGETHER_API_KEY` to this file would enable additional models. You can access available models for the server and other information via a `GET` request to the `configuration` endpoint.
Build the images:
```sh
docker compose build
```
Run the services:
```sh
docker compose up
```
This will launch both the extraction server and the postgres instance.
Verify that the server is running:
```sh
curl -X 'GET' 'http://localhost:8000/ready'
```
This should return `ok`.
The UI will be available at [http://localhost:3000](http://localhost:3000).
## Contributions
Feel free to develop in this project for your own needs!
For now, we are not accepting pull requests, but would love to hear [questions, ideas or issues](https://github.com/langchain-ai/langchain-extract/discussions).
## Development
To set up for development, you will need to install [Poetry](https://python-poetry.org/).
The backend code is located in the `backend` directory.
```sh
cd backend
```
Set up the environment using poetry:
```sh
poetry install --with lint,dev,test
```
Run the following script to create a database and schema:
```sh
python -m scripts.run_migrations create
```
From `/backend`:
```sh
OPENAI_API_KEY=[YOUR API KEY] python -m server.main
```
### Testing
Create a test database. The test database is used for running tests and is
separate from the main database. It will have the same schema as the main
database.
```sh
python -m scripts.run_migrations create-test-db
```
Run the tests
```sh
make test
```
### Linting and format
Testing and formatting is done using a Makefile inside `[root]/backend`
```sh
make format
```
|
0
|
lc_public_repos/langchain-extract
|
lc_public_repos/langchain-extract/frontend/Dockerfile
|
# Multi-stage build for the frontend; docker-compose targets the
# "development" stage.
FROM node:18-alpine AS base
# Stage that holds only the dependency manifests so later stages can copy
# them individually and keep the install layer cacheable.
FROM base AS base-deps
WORKDIR /app
COPY --link ./yarn.lock ./package.json ./.yarnrc.yml ./
# Install node_modules from the manifests alone — this layer is only
# invalidated when package.json / yarn.lock / .yarnrc.yml change.
FROM base AS installer
WORKDIR /app
COPY --link --from=base-deps /app/package.json ./package.json
COPY --link --from=base-deps /app/yarn.lock ./yarn.lock
COPY --link .yarnrc.yml .
RUN yarn install
# Production build stage (not referenced by docker-compose, which uses
# the "development" target).
FROM base AS builder
WORKDIR /app
COPY --link --from=installer /app .
COPY --link tsconfig.json tsconfig.json
RUN yarn build
# Development stage: dependencies come from the installer stage; the source
# tree is bind-mounted over /app by docker-compose at runtime.
FROM base AS development
WORKDIR /app
COPY --link --from=installer /app .
ENV NODE_ENV=development
CMD ["yarn", "dev"]
|
0
|
lc_public_repos/langchain-extract
|
lc_public_repos/langchain-extract/frontend/tsconfig.json
|
{
"compilerOptions": {
"target": "es5",
"lib": ["dom", "dom.iterable", "esnext"],
"allowJs": true,
"skipLibCheck": true,
"strict": true,
"forceConsistentCasingInFileNames": true,
"noEmit": true,
"esModuleInterop": true,
"module": "esnext",
"moduleResolution": "node",
"resolveJsonModule": true,
"isolatedModules": true,
"jsx": "preserve",
"incremental": true,
"baseUrl": "./",
"plugins": [
{
"name": "next"
}
],
"paths": {
"@/*": ["./*"]
}
},
"include": ["next-env.d.ts", "**/*.ts", "**/*.tsx", ".next/types/**/*.ts"],
"exclude": ["node_modules"]
}
|
0
|
lc_public_repos/langchain-extract
|
lc_public_repos/langchain-extract/frontend/yarn.lock
| "# This file is generated by running \"yarn install\" inside your project.\n# Manual changes might b(...TRUNCATED)
|
0
|
lc_public_repos/langchain-extract
|
lc_public_repos/langchain-extract/frontend/.eslintrc.json
| "{\n \"plugins\": [\"react\", \"@typescript-eslint\", \"eslint-plugin-import\"],\n \"env\": {\n (...TRUNCATED)
|
0
|
lc_public_repos/langchain-extract
|
lc_public_repos/langchain-extract/frontend/.yarnrc.yml
|
nodeLinker: node-modules
|
0
|
lc_public_repos/langchain-extract
|
lc_public_repos/langchain-extract/frontend/.env.example
| "# Only set for non development builds.\n# Development builds default to `http://localhost:8000`\nNE(...TRUNCATED)
|
End of preview. Expand
in Data Studio
README.md exists but content is empty.
- Downloads last month
- 1