* bump to version 0.2.0 of llm-guard
Browse files- README.md +3 -3
- app.py +4 -4
- output.py +1 -1
- prompt.py +1 -1
- requirements.txt +1 -1
README.md
CHANGED
|
@@ -1,5 +1,5 @@
|
|
| 1 |
---
|
| 2 |
-
title: LLM Guard
|
| 3 |
emoji: 🏢
|
| 4 |
colorFrom: blue
|
| 5 |
colorTo: gray
|
|
@@ -11,11 +11,11 @@ license: mit
|
|
| 11 |
Here's a simple app, written in pure Python, to create a demo website for LLM Guard.
|
| 12 |
The app is based on the [streamlit](https://streamlit.io/) package.
|
| 13 |
|
| 14 |
-
A live version can be found here: https://huggingface.co/spaces/laiyer/llm-guard-
|
| 15 |
|
| 16 |
## Requirements
|
| 17 |
|
| 18 |
-
1. Clone the repo and move to the `examples/
|
| 19 |
|
| 20 |
2. Install dependencies (preferably in a virtual environment)
|
| 21 |
|
|
|
|
| 1 |
---
|
| 2 |
+
title: LLM Guard Playground
|
| 3 |
emoji: 🏢
|
| 4 |
colorFrom: blue
|
| 5 |
colorTo: gray
|
|
|
|
| 11 |
Here's a simple app, written in pure Python, to create a demo website for LLM Guard.
|
| 12 |
The app is based on the [streamlit](https://streamlit.io/) package.
|
| 13 |
|
| 14 |
+
A live version can be found here: https://huggingface.co/spaces/laiyer/llm-guard-playground
|
| 15 |
|
| 16 |
## Requirements
|
| 17 |
|
| 18 |
+
1. Clone the repo and move to the `examples/playground` folder
|
| 19 |
|
| 20 |
2. Install dependencies (preferably in a virtual environment)
|
| 21 |
|
app.py
CHANGED
|
@@ -21,7 +21,7 @@ OUTPUT = "output"
|
|
| 21 |
vault = Vault()
|
| 22 |
|
| 23 |
st.set_page_config(
|
| 24 |
-
page_title="LLM Guard",
|
| 25 |
layout="wide",
|
| 26 |
initial_sidebar_state="expanded",
|
| 27 |
menu_items={
|
|
@@ -29,7 +29,7 @@ st.set_page_config(
|
|
| 29 |
},
|
| 30 |
)
|
| 31 |
|
| 32 |
-
logger = logging.getLogger("llm-guard-
|
| 33 |
logger.setLevel(logging.INFO)
|
| 34 |
|
| 35 |
# Sidebar
|
|
@@ -49,7 +49,7 @@ elif scanner_type == OUTPUT:
|
|
| 49 |
enabled_scanners, settings = init_output_settings()
|
| 50 |
|
| 51 |
# Main pannel
|
| 52 |
-
with st.expander("About"):
|
| 53 |
st.info(
|
| 54 |
"""LLM-Guard is a comprehensive tool designed to fortify the security of Large Language Models (LLMs).
|
| 55 |
\n\n[Code](https://github.com/laiyer-ai/llm-guard) |
|
|
@@ -127,7 +127,7 @@ except Exception as e:
|
|
| 127 |
|
| 128 |
# After:
|
| 129 |
if st_is_valid is not None:
|
| 130 |
-
st.subheader(f"Results - {'valid' if st_is_valid else 'invalid'} ({st_time_delta.total_seconds()} seconds)")
|
| 131 |
|
| 132 |
col1, col2 = st.columns(2)
|
| 133 |
|
|
|
|
| 21 |
vault = Vault()
|
| 22 |
|
| 23 |
st.set_page_config(
|
| 24 |
+
page_title="LLM Guard Playground",
|
| 25 |
layout="wide",
|
| 26 |
initial_sidebar_state="expanded",
|
| 27 |
menu_items={
|
|
|
|
| 29 |
},
|
| 30 |
)
|
| 31 |
|
| 32 |
+
logger = logging.getLogger("llm-guard-playground")
|
| 33 |
logger.setLevel(logging.INFO)
|
| 34 |
|
| 35 |
# Sidebar
|
|
|
|
| 49 |
enabled_scanners, settings = init_output_settings()
|
| 50 |
|
| 51 |
# Main pannel
|
| 52 |
+
with st.expander("About", expanded=False):
|
| 53 |
st.info(
|
| 54 |
"""LLM-Guard is a comprehensive tool designed to fortify the security of Large Language Models (LLMs).
|
| 55 |
\n\n[Code](https://github.com/laiyer-ai/llm-guard) |
|
|
|
|
| 127 |
|
| 128 |
# After:
|
| 129 |
if st_is_valid is not None:
|
| 130 |
+
st.subheader(f"Results - {'valid' if st_is_valid else 'invalid'} ({round(st_time_delta.total_seconds())} seconds)")
|
| 131 |
|
| 132 |
col1, col2 = st.columns(2)
|
| 133 |
|
output.py
CHANGED
|
@@ -22,7 +22,7 @@ from llm_guard.output_scanners.sentiment import Sentiment
|
|
| 22 |
from llm_guard.output_scanners.toxicity import Toxicity
|
| 23 |
from llm_guard.vault import Vault
|
| 24 |
|
| 25 |
-
logger = logging.getLogger("llm-guard-
|
| 26 |
|
| 27 |
|
| 28 |
def init_settings() -> (List, Dict):
|
|
|
|
| 22 |
from llm_guard.output_scanners.toxicity import Toxicity
|
| 23 |
from llm_guard.vault import Vault
|
| 24 |
|
| 25 |
+
logger = logging.getLogger("llm-guard-playground")
|
| 26 |
|
| 27 |
|
| 28 |
def init_settings() -> (List, Dict):
|
prompt.py
CHANGED
|
@@ -18,7 +18,7 @@ from llm_guard.input_scanners import (
|
|
| 18 |
from llm_guard.input_scanners.anonymize import default_entity_types
|
| 19 |
from llm_guard.vault import Vault
|
| 20 |
|
| 21 |
-
logger = logging.getLogger("llm-guard-
|
| 22 |
|
| 23 |
|
| 24 |
def init_settings() -> (List, Dict):
|
|
|
|
| 18 |
from llm_guard.input_scanners.anonymize import default_entity_types
|
| 19 |
from llm_guard.vault import Vault
|
| 20 |
|
| 21 |
+
logger = logging.getLogger("llm-guard-playground")
|
| 22 |
|
| 23 |
|
| 24 |
def init_settings() -> (List, Dict):
|
requirements.txt
CHANGED
|
@@ -1,4 +1,4 @@
|
|
| 1 |
-
llm-guard==0.
|
| 2 |
pandas==2.1.0
|
| 3 |
streamlit==1.26.0
|
| 4 |
streamlit-tags==1.2.8
|
|
|
|
| 1 |
+
llm-guard==0.2.0
|
| 2 |
pandas==2.1.0
|
| 3 |
streamlit==1.26.0
|
| 4 |
streamlit-tags==1.2.8
|