LONGYKING commited on
Commit ·
ed3fa91
1
Parent(s): afb2857
update
Browse files- .chainlit/config.toml +127 -0
- .chainlit/translations/en-US.json +231 -0
- public/.DS_Store +0 -0
- public/coins-electronics-svgrepo-com.svg +90 -0
- public/favicon.ico +0 -0
- public/logo_dark.png +0 -0
- public/logo_light.png +0 -0
- public/news-svgrepo-com.svg +48 -0
- public/script.js +113 -0
- public/stockchart-svgrepo-com.svg +2 -0
- public/stylesheet.css +3 -0
- public/wallet-svgrepo-com.svg +15 -0
- src/.DS_Store +0 -0
- src/config/.DS_Store +0 -0
- src/config/nemoguardrails/config.yml +27 -0
- src/config/telzho-430515-c2622883c30e.json +13 -0
- src/data_sources/coin_gecko.py +266 -0
- src/data_sources/cryptocompare.py +325 -0
- src/data_sources/dexscreener.py +106 -0
- src/databases/.DS_Store +0 -0
- src/databases/postgres.py +4 -0
- src/databases/redis.py +132 -0
- src/knowledge_bases/.DS_Store +0 -0
- src/knowledge_bases/combined.py +60 -0
- src/knowledge_bases/json.py +63 -0
- src/knowledge_bases/pdf.py +57 -0
- src/knowledge_bases/pipeline.py +48 -0
- src/knowledge_bases/store_data.py +10 -0
- src/libs/constants.py +70 -0
- src/libs/google_drive.py +54 -0
- src/libs/helper_functions.py +215 -0
- src/libs/logger.py +17 -0
- src/libs/rpc_client.py +52 -0
- src/libs/s3fs.py +26 -0
- src/libs/token_approval_helper.py +61 -0
- src/libs/web3.py +8 -0
- src/llms/sourcegraph.py +126 -0
- src/search_services/exa.py +0 -0
- src/search_services/jina_ai.py +100 -0
- src/tools/cloudinary_toolkit.py +57 -0
- src/tools/crypto_data_toolkit.py +151 -0
- src/tools/crypto_evm_wallet_toolkit.py +97 -0
- src/tools/crypto_swap_toolkit.py +156 -0
- src/tools/notepad_toolkit.py +214 -0
- src/tools/user_profile_toolkit.py +185 -0
.chainlit/config.toml
ADDED
|
@@ -0,0 +1,127 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
[project]
|
| 2 |
+
# Whether to enable telemetry (default: true). No personal data is collected.
|
| 3 |
+
enable_telemetry = true
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
# List of environment variables to be provided by each user to use the app.
|
| 7 |
+
user_env = []
|
| 8 |
+
|
| 9 |
+
# Duration (in seconds) during which the session is saved when the connection is lost
|
| 10 |
+
session_timeout = 3600
|
| 11 |
+
|
| 12 |
+
# Enable third parties caching (e.g LangChain cache)
|
| 13 |
+
cache = false
|
| 14 |
+
|
| 15 |
+
# Authorized origins
|
| 16 |
+
allow_origins = ["*"]
|
| 17 |
+
|
| 18 |
+
# Follow symlink for asset mount (see https://github.com/Chainlit/chainlit/issues/317)
|
| 19 |
+
# follow_symlink = false
|
| 20 |
+
|
| 21 |
+
[features]
|
| 22 |
+
# Show the prompt playground
|
| 23 |
+
prompt_playground = true
|
| 24 |
+
|
| 25 |
+
# Process and display HTML in messages. This can be a security risk (see https://stackoverflow.com/questions/19603097/why-is-it-dangerous-to-render-user-generated-html-or-javascript)
|
| 26 |
+
unsafe_allow_html = false
|
| 27 |
+
|
| 28 |
+
# Process and display mathematical expressions. This can clash with "$" characters in messages.
|
| 29 |
+
latex = false
|
| 30 |
+
|
| 31 |
+
# Automatically tag threads with the current chat profile (if a chat profile is used)
|
| 32 |
+
auto_tag_thread = true
|
| 33 |
+
|
| 34 |
+
# Authorize users to spontaneously upload files with messages
|
| 35 |
+
[features.spontaneous_file_upload]
|
| 36 |
+
enabled = true
|
| 37 |
+
accept = ["*/*"]
|
| 38 |
+
max_files = 20
|
| 39 |
+
max_size_mb = 500
|
| 40 |
+
|
| 41 |
+
[features.audio]
|
| 42 |
+
# Threshold for audio recording
|
| 43 |
+
min_decibels = -45
|
| 44 |
+
# Delay for the user to start speaking in MS
|
| 45 |
+
initial_silence_timeout = 3000
|
| 46 |
+
# Delay for the user to continue speaking in MS. If the user stops speaking for this duration, the recording will stop.
|
| 47 |
+
silence_timeout = 1500
|
| 48 |
+
# Above this duration (MS), the recording will forcefully stop.
|
| 49 |
+
max_duration = 15000
|
| 50 |
+
# Duration of the audio chunks in MS
|
| 51 |
+
chunk_duration = 1000
|
| 52 |
+
# Sample rate of the audio
|
| 53 |
+
sample_rate = 44100
|
| 54 |
+
|
| 55 |
+
[UI]
|
| 56 |
+
# Name of the app and chatbot.
|
| 57 |
+
name = "Giino"
|
| 58 |
+
|
| 59 |
+
# Show the readme while the thread is empty.
|
| 60 |
+
show_readme_as_default = true
|
| 61 |
+
|
| 62 |
+
# Description of the app and chatbot. This is used for HTML tags.
|
| 63 |
+
# description = "AI-powered unification and execution protocol assitant for web3"
|
| 64 |
+
|
| 65 |
+
# Large size content are by default collapsed for a cleaner ui
|
| 66 |
+
default_collapse_content = true
|
| 67 |
+
|
| 68 |
+
# The default value for the expand messages settings.
|
| 69 |
+
default_expand_messages = false
|
| 70 |
+
|
| 71 |
+
# Hide the chain of thought details from the user in the UI.
|
| 72 |
+
hide_cot = false
|
| 73 |
+
|
| 74 |
+
# Link to your github repo. This will add a github button in the UI's header.
|
| 75 |
+
# github = ""
|
| 76 |
+
|
| 77 |
+
# Specify a CSS file that can be used to customize the user interface.
|
| 78 |
+
# The CSS file can be served from the public directory or via an external link.
|
| 79 |
+
# custom_css = "/public/test.css"
|
| 80 |
+
custom_css = '/public/stylesheet.css'
|
| 81 |
+
|
| 82 |
+
# Specify a Javascript file that can be used to customize the user interface.
|
| 83 |
+
# The Javascript file can be served from the public directory.
|
| 84 |
+
# custom_js = "/public/test.js"
|
| 85 |
+
custom_js = "/public/script.js"
|
| 86 |
+
|
| 87 |
+
# Specify a custom font url.
|
| 88 |
+
# custom_font = "https://fonts.googleapis.com/css2?family=Inter:wght@400;500;700&display=swap"
|
| 89 |
+
|
| 90 |
+
# Specify a custom meta image url.
|
| 91 |
+
# custom_meta_image_url = "https://chainlit-cloud.s3.eu-west-3.amazonaws.com/logo/chainlit_banner.png"
|
| 92 |
+
|
| 93 |
+
# Specify a custom build directory for the frontend.
|
| 94 |
+
# This can be used to customize the frontend code.
|
| 95 |
+
# Be careful: If this is a relative path, it should not start with a slash.
|
| 96 |
+
# custom_build = "./public/build"
|
| 97 |
+
|
| 98 |
+
[UI.theme]
|
| 99 |
+
layout = "wide"
|
| 100 |
+
font_family = "Inter, sans-serif"
|
| 101 |
+
# Override default MUI light theme. (Check theme.ts)
|
| 102 |
+
[UI.theme.light]
|
| 103 |
+
# background = "#FFFFFF"
|
| 104 |
+
paper = "#FFFFFF"
|
| 105 |
+
|
| 106 |
+
[UI.theme.light.primary]
|
| 107 |
+
main = "#007BFF"
|
| 108 |
+
dark = "#1C1C1C"
|
| 109 |
+
light = "#FAFAFA"
|
| 110 |
+
|
| 111 |
+
# Override default MUI dark theme. (Check theme.ts)
|
| 112 |
+
[UI.theme.dark]
|
| 113 |
+
background = "#1C1C1C"
|
| 114 |
+
paper = "#202020"
|
| 115 |
+
|
| 116 |
+
[UI.theme.dark.primary]
|
| 117 |
+
main = "#007BFF"
|
| 118 |
+
dark = "#1C1C1C"
|
| 119 |
+
light = "#202020"
|
| 120 |
+
|
| 121 |
+
|
| 122 |
+
#Button Color: #1C4DF0
|
| 123 |
+
#Button Color 2: #062C8A
|
| 124 |
+
|
| 125 |
+
|
| 126 |
+
[meta]
|
| 127 |
+
generated_by = "1.1.202"
|
.chainlit/translations/en-US.json
ADDED
|
@@ -0,0 +1,231 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"components": {
|
| 3 |
+
"atoms": {
|
| 4 |
+
"buttons": {
|
| 5 |
+
"userButton": {
|
| 6 |
+
"menu": {
|
| 7 |
+
"settings": "Settings",
|
| 8 |
+
"settingsKey": "S",
|
| 9 |
+
"APIKeys": "API Keys",
|
| 10 |
+
"logout": "Logout"
|
| 11 |
+
}
|
| 12 |
+
}
|
| 13 |
+
}
|
| 14 |
+
},
|
| 15 |
+
"molecules": {
|
| 16 |
+
"newChatButton": {
|
| 17 |
+
"newChat": "New Chat"
|
| 18 |
+
},
|
| 19 |
+
"tasklist": {
|
| 20 |
+
"TaskList": {
|
| 21 |
+
"title": "\ud83d\uddd2\ufe0f Task List",
|
| 22 |
+
"loading": "Loading...",
|
| 23 |
+
"error": "An error occured"
|
| 24 |
+
}
|
| 25 |
+
},
|
| 26 |
+
"attachments": {
|
| 27 |
+
"cancelUpload": "Cancel upload",
|
| 28 |
+
"removeAttachment": "Remove attachment"
|
| 29 |
+
},
|
| 30 |
+
"newChatDialog": {
|
| 31 |
+
"createNewChat": "Create new chat?",
|
| 32 |
+
"clearChat": "This will clear the current messages and start a new chat.",
|
| 33 |
+
"cancel": "Cancel",
|
| 34 |
+
"confirm": "Confirm"
|
| 35 |
+
},
|
| 36 |
+
"settingsModal": {
|
| 37 |
+
"settings": "Settings",
|
| 38 |
+
"expandMessages": "Expand Messages",
|
| 39 |
+
"hideChainOfThought": "Hide Chain of Thought",
|
| 40 |
+
"darkMode": "Dark Mode"
|
| 41 |
+
},
|
| 42 |
+
"detailsButton": {
|
| 43 |
+
"using": "Using",
|
| 44 |
+
"running": "Running",
|
| 45 |
+
"took_one": "Took {{count}} step",
|
| 46 |
+
"took_other": "Took {{count}} steps"
|
| 47 |
+
},
|
| 48 |
+
"auth": {
|
| 49 |
+
"authLogin": {
|
| 50 |
+
"title": "Login to access the app.",
|
| 51 |
+
"form": {
|
| 52 |
+
"email": "Email address",
|
| 53 |
+
"password": "Password",
|
| 54 |
+
"noAccount": "Don't have an account?",
|
| 55 |
+
"alreadyHaveAccount": "Already have an account?",
|
| 56 |
+
"signup": "Sign Up",
|
| 57 |
+
"signin": "Sign In",
|
| 58 |
+
"or": "OR",
|
| 59 |
+
"continue": "Continue",
|
| 60 |
+
"forgotPassword": "Forgot password?",
|
| 61 |
+
"passwordMustContain": "Your password must contain:",
|
| 62 |
+
"emailRequired": "email is a required field",
|
| 63 |
+
"passwordRequired": "password is a required field"
|
| 64 |
+
},
|
| 65 |
+
"error": {
|
| 66 |
+
"default": "Unable to sign in.",
|
| 67 |
+
"signin": "Try signing in with a different account.",
|
| 68 |
+
"oauthsignin": "Try signing in with a different account.",
|
| 69 |
+
"redirect_uri_mismatch": "The redirect URI is not matching the oauth app configuration.",
|
| 70 |
+
"oauthcallbackerror": "Try signing in with a different account.",
|
| 71 |
+
"oauthcreateaccount": "Try signing in with a different account.",
|
| 72 |
+
"emailcreateaccount": "Try signing in with a different account.",
|
| 73 |
+
"callback": "Try signing in with a different account.",
|
| 74 |
+
"oauthaccountnotlinked": "To confirm your identity, sign in with the same account you used originally.",
|
| 75 |
+
"emailsignin": "The e-mail could not be sent.",
|
| 76 |
+
"emailverify": "Please verify your email, a new email has been sent.",
|
| 77 |
+
"credentialssignin": "Sign in failed. Check the details you provided are correct.",
|
| 78 |
+
"sessionrequired": "Please sign in to access this page."
|
| 79 |
+
}
|
| 80 |
+
},
|
| 81 |
+
"authVerifyEmail": {
|
| 82 |
+
"almostThere": "You're almost there! We've sent an email to ",
|
| 83 |
+
"verifyEmailLink": "Please click on the link in that email to complete your signup.",
|
| 84 |
+
"didNotReceive": "Can't find the email?",
|
| 85 |
+
"resendEmail": "Resend email",
|
| 86 |
+
"goBack": "Go Back",
|
| 87 |
+
"emailSent": "Email sent successfully.",
|
| 88 |
+
"verifyEmail": "Verify your email address"
|
| 89 |
+
},
|
| 90 |
+
"providerButton": {
|
| 91 |
+
"continue": "Continue with {{provider}}",
|
| 92 |
+
"signup": "Sign up with {{provider}}"
|
| 93 |
+
},
|
| 94 |
+
"authResetPassword": {
|
| 95 |
+
"newPasswordRequired": "New password is a required field",
|
| 96 |
+
"passwordsMustMatch": "Passwords must match",
|
| 97 |
+
"confirmPasswordRequired": "Confirm password is a required field",
|
| 98 |
+
"newPassword": "New password",
|
| 99 |
+
"confirmPassword": "Confirm password",
|
| 100 |
+
"resetPassword": "Reset Password"
|
| 101 |
+
},
|
| 102 |
+
"authForgotPassword": {
|
| 103 |
+
"email": "Email address",
|
| 104 |
+
"emailRequired": "email is a required field",
|
| 105 |
+
"emailSent": "Please check the email address {{email}} for instructions to reset your password.",
|
| 106 |
+
"enterEmail": "Enter your email address and we will send you instructions to reset your password.",
|
| 107 |
+
"resendEmail": "Resend email",
|
| 108 |
+
"continue": "Continue",
|
| 109 |
+
"goBack": "Go Back"
|
| 110 |
+
}
|
| 111 |
+
}
|
| 112 |
+
},
|
| 113 |
+
"organisms": {
|
| 114 |
+
"chat": {
|
| 115 |
+
"history": {
|
| 116 |
+
"index": {
|
| 117 |
+
"showHistory": "Show history",
|
| 118 |
+
"lastInputs": "Last Inputs",
|
| 119 |
+
"noInputs": "Such empty...",
|
| 120 |
+
"loading": "Loading..."
|
| 121 |
+
}
|
| 122 |
+
},
|
| 123 |
+
"inputBox": {
|
| 124 |
+
"input": {
|
| 125 |
+
"placeholder": "Type your message here..."
|
| 126 |
+
},
|
| 127 |
+
"speechButton": {
|
| 128 |
+
"start": "Start recording",
|
| 129 |
+
"stop": "Stop recording"
|
| 130 |
+
},
|
| 131 |
+
"SubmitButton": {
|
| 132 |
+
"sendMessage": "Send message",
|
| 133 |
+
"stopTask": "Stop Task"
|
| 134 |
+
},
|
| 135 |
+
"UploadButton": {
|
| 136 |
+
"attachFiles": "Attach files"
|
| 137 |
+
},
|
| 138 |
+
"waterMark": {
|
| 139 |
+
"text": ""
|
| 140 |
+
}
|
| 141 |
+
},
|
| 142 |
+
"Messages": {
|
| 143 |
+
"index": {
|
| 144 |
+
"running": "Running",
|
| 145 |
+
"executedSuccessfully": "executed successfully",
|
| 146 |
+
"failed": "failed",
|
| 147 |
+
"feedbackUpdated": "Feedback updated",
|
| 148 |
+
"updating": "Updating"
|
| 149 |
+
}
|
| 150 |
+
},
|
| 151 |
+
"dropScreen": {
|
| 152 |
+
"dropYourFilesHere": "Drop your files here"
|
| 153 |
+
},
|
| 154 |
+
"index": {
|
| 155 |
+
"failedToUpload": "Failed to upload",
|
| 156 |
+
"cancelledUploadOf": "Cancelled upload of",
|
| 157 |
+
"couldNotReachServer": "Could not reach the server",
|
| 158 |
+
"continuingChat": "Continuing previous chat"
|
| 159 |
+
},
|
| 160 |
+
"settings": {
|
| 161 |
+
"settingsPanel": "Settings panel",
|
| 162 |
+
"reset": "Reset",
|
| 163 |
+
"cancel": "Cancel",
|
| 164 |
+
"confirm": "Confirm"
|
| 165 |
+
}
|
| 166 |
+
},
|
| 167 |
+
"threadHistory": {
|
| 168 |
+
"sidebar": {
|
| 169 |
+
"filters": {
|
| 170 |
+
"FeedbackSelect": {
|
| 171 |
+
"feedbackAll": "Feedback: All",
|
| 172 |
+
"feedbackPositive": "Feedback: Positive",
|
| 173 |
+
"feedbackNegative": "Feedback: Negative"
|
| 174 |
+
},
|
| 175 |
+
"SearchBar": {
|
| 176 |
+
"search": "Search"
|
| 177 |
+
}
|
| 178 |
+
},
|
| 179 |
+
"DeleteThreadButton": {
|
| 180 |
+
"confirmMessage": "This will delete the thread as well as it's messages and elements.",
|
| 181 |
+
"cancel": "Cancel",
|
| 182 |
+
"confirm": "Confirm",
|
| 183 |
+
"deletingChat": "Deleting chat",
|
| 184 |
+
"chatDeleted": "Chat deleted"
|
| 185 |
+
},
|
| 186 |
+
"index": {
|
| 187 |
+
"pastChats": "Past Chats"
|
| 188 |
+
},
|
| 189 |
+
"ThreadList": {
|
| 190 |
+
"empty": "Empty...",
|
| 191 |
+
"today": "Today",
|
| 192 |
+
"yesterday": "Yesterday",
|
| 193 |
+
"previous7days": "Previous 7 days",
|
| 194 |
+
"previous30days": "Previous 30 days"
|
| 195 |
+
},
|
| 196 |
+
"TriggerButton": {
|
| 197 |
+
"closeSidebar": "Close sidebar",
|
| 198 |
+
"openSidebar": "Open sidebar"
|
| 199 |
+
}
|
| 200 |
+
},
|
| 201 |
+
"Thread": {
|
| 202 |
+
"backToChat": "Go back to chat",
|
| 203 |
+
"chatCreatedOn": "This chat was created on"
|
| 204 |
+
}
|
| 205 |
+
},
|
| 206 |
+
"header": {
|
| 207 |
+
"chat": "Chat",
|
| 208 |
+
"readme": "Readme"
|
| 209 |
+
}
|
| 210 |
+
}
|
| 211 |
+
},
|
| 212 |
+
"hooks": {
|
| 213 |
+
"useLLMProviders": {
|
| 214 |
+
"failedToFetchProviders": "Failed to fetch providers:"
|
| 215 |
+
}
|
| 216 |
+
},
|
| 217 |
+
"pages": {
|
| 218 |
+
"Design": {},
|
| 219 |
+
"Env": {
|
| 220 |
+
"savedSuccessfully": "Saved successfully",
|
| 221 |
+
"requiredApiKeys": "Required API Keys",
|
| 222 |
+
"requiredApiKeysInfo": "To use this app, the following API keys are required. The keys are stored on your device's local storage."
|
| 223 |
+
},
|
| 224 |
+
"Page": {
|
| 225 |
+
"notPartOfProject": "You are not part of this project."
|
| 226 |
+
},
|
| 227 |
+
"ResumeButton": {
|
| 228 |
+
"resumeChat": "Resume Chat"
|
| 229 |
+
}
|
| 230 |
+
}
|
| 231 |
+
}
|
public/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
public/coins-electronics-svgrepo-com.svg
ADDED
|
|
public/favicon.ico
ADDED
|
|
public/logo_dark.png
ADDED
|
public/logo_light.png
ADDED
|
public/news-svgrepo-com.svg
ADDED
|
|
public/script.js
ADDED
|
@@ -0,0 +1,113 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
document.addEventListener('DOMContentLoaded', () => {
|
| 2 |
+
const items = { ...localStorage };
|
| 3 |
+
const isAuthed = items?.token;
|
| 4 |
+
|
| 5 |
+
const wdgt = {
|
| 6 |
+
idBox: 'wdgt',
|
| 7 |
+
url_widget: 'https://next-starter.thirdweb-example.com/',
|
| 8 |
+
url_style: 'http://localhost:8080/css/widget.css',
|
| 9 |
+
|
| 10 |
+
init: function (identifier, byClass = false) {
|
| 11 |
+
console.log("Begin Widget initialization");
|
| 12 |
+
|
| 13 |
+
const observer = new MutationObserver((mutations, observer) => {
|
| 14 |
+
let elements = [];
|
| 15 |
+
if (byClass) {
|
| 16 |
+
elements = document.querySelectorAll(`.${identifier}`);
|
| 17 |
+
console.log(`Looking for elements by class: .${identifier}`);
|
| 18 |
+
} else {
|
| 19 |
+
const element = document.getElementById(identifier || this.idBox);
|
| 20 |
+
if (element) {
|
| 21 |
+
elements.push(element);
|
| 22 |
+
}
|
| 23 |
+
console.log(`Looking for element by ID: #${identifier || this.idBox}`);
|
| 24 |
+
}
|
| 25 |
+
|
| 26 |
+
console.log(`Found elements:`, elements);
|
| 27 |
+
|
| 28 |
+
if (elements.length > 0 && elements[0] != null) {
|
| 29 |
+
observer.disconnect(); // Stop observing once elements are found
|
| 30 |
+
try {
|
| 31 |
+
const XHR = ("onload" in new XMLHttpRequest()) ? XMLHttpRequest : XDomainRequest;
|
| 32 |
+
const xhr = new XHR();
|
| 33 |
+
xhr.open('GET', this.url_widget, true);
|
| 34 |
+
|
| 35 |
+
xhr.onload = function () {
|
| 36 |
+
if (this.status >= 200 && this.status < 300) {
|
| 37 |
+
elements.forEach(element => {
|
| 38 |
+
element.innerHTML = this.responseText;
|
| 39 |
+
});
|
| 40 |
+
} else {
|
| 41 |
+
console.error('Failed to load widget content. Status:', this.status);
|
| 42 |
+
}
|
| 43 |
+
};
|
| 44 |
+
|
| 45 |
+
xhr.onerror = function () {
|
| 46 |
+
console.error('onerror', this.status);
|
| 47 |
+
};
|
| 48 |
+
|
| 49 |
+
xhr.send();
|
| 50 |
+
} catch (error) {
|
| 51 |
+
console.error('Error loading widget:', error);
|
| 52 |
+
}
|
| 53 |
+
}
|
| 54 |
+
});
|
| 55 |
+
|
| 56 |
+
// Start observing the document body for changes
|
| 57 |
+
observer.observe(document.body, { childList: true, subtree: true });
|
| 58 |
+
|
| 59 |
+
// Initial check in case the elements are already present
|
| 60 |
+
let elements = [];
|
| 61 |
+
if (byClass) {
|
| 62 |
+
elements = document.querySelectorAll(`.${identifier}`);
|
| 63 |
+
} else {
|
| 64 |
+
const element = document.getElementById(identifier || this.idBox);
|
| 65 |
+
if (element) {
|
| 66 |
+
elements.push(element);
|
| 67 |
+
}
|
| 68 |
+
}
|
| 69 |
+
|
| 70 |
+
if (elements.length > 0 && elements[0] != null) {
|
| 71 |
+
observer.disconnect(); // Stop observing if elements are found initially
|
| 72 |
+
try {
|
| 73 |
+
const XHR = ("onload" in new XMLHttpRequest()) ? XMLHttpRequest : XDomainRequest;
|
| 74 |
+
const xhr = new XHR();
|
| 75 |
+
xhr.open('GET', this.url_widget, true);
|
| 76 |
+
|
| 77 |
+
xhr.onload = function () {
|
| 78 |
+
if (this.status >= 200 && this.status < 300) {
|
| 79 |
+
elements.forEach(element => {
|
| 80 |
+
element.innerHTML = this.responseText;
|
| 81 |
+
});
|
| 82 |
+
} else {
|
| 83 |
+
console.error('Failed to load widget content. Status:', this.status);
|
| 84 |
+
}
|
| 85 |
+
};
|
| 86 |
+
|
| 87 |
+
xhr.onerror = function () {
|
| 88 |
+
console.error('onerror', this.status);
|
| 89 |
+
};
|
| 90 |
+
|
| 91 |
+
xhr.send();
|
| 92 |
+
} catch (error) {
|
| 93 |
+
console.error('Error loading widget:', error);
|
| 94 |
+
}
|
| 95 |
+
}
|
| 96 |
+
},
|
| 97 |
+
|
| 98 |
+
addStyle: function () {
|
| 99 |
+
const style = document.createElement('link');
|
| 100 |
+
style.rel = 'stylesheet';
|
| 101 |
+
style.type = 'text/css';
|
| 102 |
+
style.href = this.url_style;
|
| 103 |
+
document.head.appendChild(style);
|
| 104 |
+
}
|
| 105 |
+
};
|
| 106 |
+
|
| 107 |
+
if (isAuthed) {
|
| 108 |
+
// alert('You are authed');
|
| 109 |
+
} else {
|
| 110 |
+
// alert('You are not authed');
|
| 111 |
+
// wdgt.init('root', false); // Change 'true' to 'false' if you want to use an ID instead of class
|
| 112 |
+
}
|
| 113 |
+
});
|
public/stockchart-svgrepo-com.svg
ADDED
|
|
public/stylesheet.css
ADDED
|
@@ -0,0 +1,3 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
.watermark, .css-1705j0v, .css-1rplq84 {
|
| 2 |
+
display: none!important
|
| 3 |
+
}
|
public/wallet-svgrepo-com.svg
ADDED
|
|
src/.DS_Store
ADDED
|
Binary file (10.2 kB). View file
|
|
|
src/config/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
src/config/nemoguardrails/config.yml
ADDED
|
@@ -0,0 +1,27 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
# config.yml
|
| 2 |
+
models:
|
| 3 |
+
- type: main
|
| 4 |
+
engine: openai
|
| 5 |
+
model: gpt-3.5-turbo-instruct
|
| 6 |
+
|
| 7 |
+
rails:
|
| 8 |
+
# Input rails are invoked when new input from the user is received.
|
| 9 |
+
input:
|
| 10 |
+
flows:
|
| 11 |
+
- check jailbreak
|
| 12 |
+
- mask sensitive data on input
|
| 13 |
+
|
| 14 |
+
# Output rails are triggered after a bot message has been generated.
|
| 15 |
+
output:
|
| 16 |
+
flows:
|
| 17 |
+
- self check facts
|
| 18 |
+
- self check hallucination
|
| 19 |
+
- activefence moderation
|
| 20 |
+
|
| 21 |
+
config:
|
| 22 |
+
# Configure the types of entities that should be masked on user input.
|
| 23 |
+
sensitive_data_detection:
|
| 24 |
+
input:
|
| 25 |
+
entities:
|
| 26 |
+
- PERSON
|
| 27 |
+
- EMAIL_ADDRESS
|
src/config/telzho-430515-c2622883c30e.json
ADDED
|
@@ -0,0 +1,13 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
{
|
| 2 |
+
"type": "service_account",
|
| 3 |
+
"project_id": "telzho-430515",
|
| 4 |
+
"private_key_id": "c2622883c30ecd05999180754516650bf6b692c5",
|
| 5 |
+
"private_key": "-----BEGIN PRIVATE KEY-----\nMIIEvgIBADANBgkqhkiG9w0BAQEFAASCBKgwggSkAgEAAoIBAQCa/PmlpE9CQVqb\nwsJHyDE8PCaieLWlPvIgslDArY36nvvtIaZuVkMjQkUyj4qJow/8sLF4PWt6ROMC\n+y5tZoPVk917EXKTCLySU/Q0lhZ3Y7Ftq+Y1Rilw4Mea0SRm0oFNEnnhmhBW3TCX\nvoFd+1BaEpK+lP7/4gN6FIjNZkOmqis0ikAlMppt0/Uni3HXozfY8o4Zeao41ebu\nJudYKfV2q9aizM+ZP615Dy225H1bQWEUKY9spvFt+HFat73mIFzFf6caFh04fVcf\nDdslEfJ9DAQESiI0QTLITyNmMxyl7h0E7Wc9saeCmQ2B8lc7L/HaonotveC7gdx1\nrEKi3LnnAgMBAAECggEAFzKzkIjIxDtRUJcUyqG68i/JU0WLYAcik/pHU/pQ51On\nPusWeiq3NIPVelKPLeLA9dWMpVlWyPTz9ZzwagzkwVxh05Xc9we2Z0CMVi1Q14s4\nd23ksHymn0VFxcp1ZF6Fs/+GoVnqBYETUOMsLIun5JN7G245whsWN3jzW6sUMnxy\nMRCMxl2SMkLvdjyeAOwUSaEj+c9/aSHW9mu90RUhZDxKY9xIj7ldQKC+xuVkYHBV\nwz2TQr0sIZz4aC/T+e6c6jPqVP8sWkA/QU1QhcpySzDe4ih3xbG4Hve/ZkO+h3KE\nEcw+BwDBtHLWJV2zlmvGX84Vz7VS1BFlBRZQWrFIIQKBgQDW09iZhNffer3wo04M\nRG1C5idRGL7JWWudoJbenRcYR8hco3A0WHTSicS5AV6QdN5eO8f4HQaI59TMtFWm\nGbun+d5uNRg6egIO4Yr9Am3xIpSzvulrA0d8V6+aSWkP/hCf+w0tB/AJKeIVxTgr\nCO9L5n1Wnf3+az1dL4rIi1Z4UwKBgQC4sTY2pNEYI8WgeuCfTCvDvGdvGlHCnixh\nYCSlHu7jnqWS+kf+6TGD2yBMNwhLDA2Y3YN8yaXL5ohK2BEd14fG1Wu5wg0gVP8x\nO+VLhmUo+p9qOE+mSxGj6P0cqv3FhHMXqtZ0PGZCLFWtgqfGaZiYAtok+eb/wAdv\naVPlB+11nQKBgQC/VnIN0g7/gfAYmyrRrERwmFMeXN7YsSczfqvBXMMMIisnr34p\ncTujpYcsjDawjRbdcXta1mdSmsGjDV9XkIXeR6PAHtQUiUS6AlJrpLVNV1KAgaKe\naI5MBkFmNso3rz8Zbl4+5Fn7n205nK25Trzrqj+qXQ+zgcpm7Ag9NDVw9wKBgHMK\nBp+wt4DWv0E/aGwwsM3hq3ebqDOV/ETG8uxawSfMaIuNE40myS29Lnfqi2LbZhjC\nWSVAIOJJfLlJrhetaSgjRU9WHID4EGAlU6RgWOBWgzdRTeNhvOIgvI+/vgPqhRnH\n0n5iJLuxC8jgqgo/OcntdN01P3orrTXrIFN3RaRNAoGBAIBhseLNAbMVXLO08lyM\nSnzPAdcOnl4FvdSK6+187guIs6MR9yzGsKKAPKx7AGP3pOo0j3MiN7CjPEFpukWF\nEJb8x6cQYA0GPG4nA/DYfDp+uTVqtmUGDqoLhH94xzBXgOE88vS+kUMlh5/aFM1S\nJtD2H1eB8JSJrlZvHeonj/Jx\n-----END PRIVATE KEY-----\n",
|
| 6 |
+
"client_email": "icodeidea@telzho-430515.iam.gserviceaccount.com",
|
| 7 |
+
"client_id": "105989590052836790986",
|
| 8 |
+
"auth_uri": "https://accounts.google.com/o/oauth2/auth",
|
| 9 |
+
"token_uri": "https://oauth2.googleapis.com/token",
|
| 10 |
+
"auth_provider_x509_cert_url": "https://www.googleapis.com/oauth2/v1/certs",
|
| 11 |
+
"client_x509_cert_url": "https://www.googleapis.com/robot/v1/metadata/x509/icodeidea%40telzho-430515.iam.gserviceaccount.com",
|
| 12 |
+
"universe_domain": "googleapis.com"
|
| 13 |
+
}
|
src/data_sources/coin_gecko.py
ADDED
|
@@ -0,0 +1,266 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from dotenv import load_dotenv
|
| 3 |
+
from src.databases.redis import REDIS_CACHED
|
| 4 |
+
from src.libs.constants import ONE_HOUR_IN_SECONDS, ONE_MINUTE_IN_SECONDS, ONE_MONTH_IN_SECONDS
|
| 5 |
+
from src.libs.logger import logger
|
| 6 |
+
from coingecko import CoinGeckoProClient, CoinGeckoDemoClient
|
| 7 |
+
|
| 8 |
+
load_dotenv()
|
| 9 |
+
|
| 10 |
+
redis_cache = REDIS_CACHED
|
| 11 |
+
|
| 12 |
+
class CoinGecko:
|
| 13 |
+
def __init__(self, pro_api: bool = False) -> None:
|
| 14 |
+
"""
|
| 15 |
+
Initializes the CoinGecko object.
|
| 16 |
+
|
| 17 |
+
Args:
|
| 18 |
+
pro_api (bool, optional): A boolean value indicating whether to use the Pro API. Defaults to False.
|
| 19 |
+
|
| 20 |
+
Returns:
|
| 21 |
+
None
|
| 22 |
+
|
| 23 |
+
Raises:
|
| 24 |
+
ValueError: If the `pro_api` argument is not a boolean value.
|
| 25 |
+
|
| 26 |
+
Note:
|
| 27 |
+
This method initializes the CoinGecko object by creating an instance of either CoinGeckoProClient or CoinGeckoDemoClient based on the `pro_api` argument. If `pro_api` is `True`, it uses the Pro API with the provided API key; otherwise, it uses the Demo API.
|
| 28 |
+
"""
|
| 29 |
+
if not isinstance(pro_api, bool):
|
| 30 |
+
raise ValueError("The `pro_api` argument must be a boolean value.")
|
| 31 |
+
|
| 32 |
+
if pro_api:
|
| 33 |
+
self.cgc = CoinGeckoProClient(api_key=os.getenv('COINGECKO_PRO_API_KEY'))
|
| 34 |
+
else:
|
| 35 |
+
self.cgc = CoinGeckoDemoClient(api_key=os.getenv('COINGECKO_DEMO_API_KEY'))
|
| 36 |
+
|
| 37 |
+
@redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
|
| 38 |
+
@logger.instrument()
|
| 39 |
+
def get_coin_price(self, ids: list[str], vs_currencies: list[str], cache_ttl: int = None) -> dict:
|
| 40 |
+
"""
|
| 41 |
+
Retrieves the current price of a list of coins in a specified currency.
|
| 42 |
+
|
| 43 |
+
Args:
|
| 44 |
+
ids (list[str]): A list of unique identifiers for the coins to retrieve prices for.
|
| 45 |
+
vs_currencies (list[str]): A list of currency codes to convert the coin prices to.
|
| 46 |
+
cache_ttl (int, optional): The time-to-live (in seconds) for the cached data. Defaults to None.
|
| 47 |
+
|
| 48 |
+
Returns:
|
| 49 |
+
dict: A dictionary containing the current prices of the specified coins in the specified currencies.
|
| 50 |
+
|
| 51 |
+
Example:
|
| 52 |
+
To get the current prices of a list of coins in USD, you can call:
|
| 53 |
+
|
| 54 |
+
```python
|
| 55 |
+
coin_prices = coin_gecko.get_coin_price(ids=['bitcoin', 'ethereum'], vs_currencies=['usd'])
|
| 56 |
+
```
|
| 57 |
+
|
| 58 |
+
Note:
|
| 59 |
+
The `get_coin_price` method uses the `redis_cache` decorator to cache the result for a specified time (`ONE_MINUTE_IN_SECONDS`). This means that if the same request is made within the cache time, the cached result will be returned instead of making a new API call.
|
| 60 |
+
"""
|
| 61 |
+
result = self.cgc.simple.get_price(ids=ids, vs_currencies=vs_currencies)
|
| 62 |
+
|
| 63 |
+
return result
|
| 64 |
+
|
| 65 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
|
| 66 |
+
@logger.instrument()
|
| 67 |
+
def get_coin_data(self, id: str, cache_ttl: int = None) -> dict:
|
| 68 |
+
"""
|
| 69 |
+
Retrieves detailed information about a specific coin.
|
| 70 |
+
|
| 71 |
+
Args:
|
| 72 |
+
id (str): The unique identifier of the coin.
|
| 73 |
+
cache_ttl (int, optional): The time-to-live (in seconds) for the cached data. Defaults to None.
|
| 74 |
+
|
| 75 |
+
Returns:
|
| 76 |
+
dict: A dictionary containing the detailed information about the specified coin.
|
| 77 |
+
|
| 78 |
+
Example:
|
| 79 |
+
To get data for the coin with id 'bitcoin', you can call:
|
| 80 |
+
|
| 81 |
+
```python
|
| 82 |
+
coin_data = coin_gecko.get_coin_data(id='bitcoin')
|
| 83 |
+
```
|
| 84 |
+
|
| 85 |
+
Note:
|
| 86 |
+
The `get_coin_data` method uses the `redis_cache` decorator to cache the result for a specified time (`ONE_HOUR_IN_SECONDS`). This means that if the same request is made within the cache time, the cached result will be returned instead of making a new API call.
|
| 87 |
+
"""
|
| 88 |
+
result = self.cgc.coins.get_id(id=id, localization=False, market_data=False, tickers=False, sparkline=False)
|
| 89 |
+
logger.debug(f"Result: {result}")
|
| 90 |
+
|
| 91 |
+
return result
|
| 92 |
+
|
| 93 |
+
@redis_cache(ttl=ONE_MONTH_IN_SECONDS)
@logger.instrument()
def get_coin_category_data(self, params: dict = None, cache_ttl: int = None) -> dict:
    """Fetch market data for every CoinGecko coin category.

    Args:
        params (dict, optional): Query parameters for the request. Falsy
            values (``None`` or ``{}``) fall back to ordering by market cap
            descending.
        cache_ttl (int, optional): Override for the Redis cache TTL in
            seconds; consumed by the ``redis_cache`` decorator. Defaults to
            ``ONE_MONTH_IN_SECONDS``.

    Returns:
        dict: ``{'categories': <API response>}``.

    Example:
        ```python
        category_data = coin_gecko.get_coin_category_data()
        ```
    """
    # Any falsy `params` (None or empty dict) gets the default ordering.
    query = params or {"order": "market_cap_desc"}

    categories = self.cgc.categories.get_data(endpoint="coins/categories", params=query)
    logger.debug(f"Result: {categories}")
    return {"categories": categories}
|
| 124 |
+
|
| 125 |
+
@redis_cache(ttl=ONE_MONTH_IN_SECONDS)
@logger.instrument()
def get_exchanges_list(self, cache_ttl: int = None) -> dict:
    """List every exchange known to CoinGecko.

    Args:
        cache_ttl (int, optional): Override for the Redis cache TTL in
            seconds; consumed by the ``redis_cache`` decorator. Defaults to
            ``ONE_MONTH_IN_SECONDS``.

    Returns:
        dict: ``{'exchanges': <API response>}``.

    Example:
        ```python
        exchange_list = coin_gecko.get_exchanges_list()
        ```
    """
    exchanges = self.cgc.exchanges.get_list()
    logger.debug(f"Result: {exchanges}")
    return {'exchanges': exchanges}
|
| 151 |
+
|
| 152 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
@logger.instrument()
def get_exchange_data(self, id: str = None, cache_ttl: int = None) -> dict:
    """Fetch detailed information about one exchange from CoinGecko.

    Args:
        id (str, optional): CoinGecko exchange identifier (e.g. ``'binance'``).
            Required in practice; ``None`` is rejected.
        cache_ttl (int, optional): Override for the Redis cache TTL in
            seconds; consumed by the ``redis_cache`` decorator. Defaults to
            ``ONE_HOUR_IN_SECONDS``.

    Returns:
        dict: Exchange details as returned by the API.

    Raises:
        ValueError: If ``id`` is ``None``.

    Example:
        ```python
        exchange_data = coin_gecko.get_exchange_data(id='binance')
        ```
    """
    # Guard clause: the upstream endpoint requires an exchange id.
    if id is None:
        raise ValueError("Exchange ID must be provided.")

    exchange_details = self.cgc.exchanges.get_id(id=id)
    logger.debug(f"Result: {exchange_details}")
    return exchange_details
|
| 185 |
+
|
| 186 |
+
@redis_cache(ttl=ONE_MONTH_IN_SECONDS)
@logger.instrument()
def get_coin_categories_list(self, cache_ttl: int = None) -> dict:
    """List all coin categories known to CoinGecko.

    Args:
        cache_ttl (int, optional): Override for the Redis cache TTL in
            seconds; consumed by the ``redis_cache`` decorator. Defaults to
            ``ONE_MONTH_IN_SECONDS``.

    Returns:
        dict: ``{'categories': <list>, 'exchanges': <list>}`` — both keys hold
        the same category list. ``'exchanges'`` was a copy-paste slip in the
        original return value and is retained only so existing callers keep
        working; new code should read ``'categories'``.

    Example:
        ```python
        category_list = coin_gecko.get_coin_categories_list()
        ```
    """
    result = self.cgc.categories.get()
    logger.debug(f"Result: {result}")

    # Bug fix: the payload is categories, not exchanges. Expose the correct
    # key while keeping the old (misnamed) key for backward compatibility.
    return {'categories': result, 'exchanges': result}
|
| 212 |
+
|
| 213 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
@logger.instrument()
def get_asset_platforms_list(self, cache_ttl: int = None) -> dict:
    """List all asset platforms (chains) known to CoinGecko.

    Args:
        cache_ttl (int, optional): Override for the Redis cache TTL in
            seconds; consumed by the ``redis_cache`` decorator. Defaults to
            ``ONE_HOUR_IN_SECONDS``.

    Returns:
        dict: ``{'asset_platforms': <list>, 'exchanges': <list>}`` — both keys
        hold the same platform list. ``'exchanges'`` was a copy-paste slip in
        the original return value and is retained only for existing callers;
        new code should read ``'asset_platforms'``.

    Example:
        ```python
        asset_platforms_list = coin_gecko.get_asset_platforms_list()
        ```
    """
    result = self.cgc.asset_platform.get()
    logger.debug(f"Result: {result}")

    # Bug fix: the payload is asset platforms, not exchanges. Expose the
    # correct key while keeping the old (misnamed) key for compatibility.
    return {'asset_platforms': result, 'exchanges': result}
|
| 239 |
+
|
| 240 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
@logger.instrument()
def get_trending_coin_list(self, cache_ttl: int = None) -> dict:
    """Fetch CoinGecko's trending search results for the last 24 hours.

    The upstream endpoint returns the top trending 15 coins, 7 NFTs and
    5 categories.

    Args:
        cache_ttl (int, optional): Override for the Redis cache TTL in
            seconds; consumed by the ``redis_cache`` decorator. Defaults to
            ``ONE_HOUR_IN_SECONDS``.

    Returns:
        dict: ``{'trending': <payload>, 'exchanges': <payload>}`` — both keys
        hold the same trending payload. ``'exchanges'`` was a copy-paste slip
        in the original return value and is retained only for existing
        callers; new code should read ``'trending'``.

    Example:
        ```python
        trending_coins = coin_gecko.get_trending_coin_list()
        ```
    """
    result = self.cgc.search.get_trending()
    logger.debug(f"Result: {result}")

    # Bug fix: the payload is trending search data, not exchanges. Expose the
    # correct key while keeping the old (misnamed) key for compatibility.
    return {'trending': result, 'exchanges': result}
|
| 266 |
+
|
src/data_sources/cryptocompare.py
ADDED
|
@@ -0,0 +1,325 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from dotenv import load_dotenv
|
| 3 |
+
load_dotenv()
|
| 4 |
+
|
| 5 |
+
import httpx
|
| 6 |
+
from enum import Enum
|
| 7 |
+
from src.libs.logger import logger
|
| 8 |
+
from src.databases.redis import REDIS_CACHED
|
| 9 |
+
from src.libs.helper_functions import convert_to_snakecase
|
| 10 |
+
from src.libs.constants import (
|
| 11 |
+
ONE_HOUR_IN_SECONDS,
|
| 12 |
+
ONE_MONTH_IN_SECONDS,
|
| 13 |
+
ONE_MINUTE_IN_SECONDS,
|
| 14 |
+
ONE_QUARTER_IN_SECONDS,
|
| 15 |
+
CRYPTO_COMPARE_API_BASE_URL,
|
| 16 |
+
CRYPTO_COMPARE_ASSET_DATA_API_BASE_URL,
|
| 17 |
+
)
|
| 18 |
+
|
| 19 |
+
# Short alias for the shared Redis caching decorator used throughout this module.
redis_cache = REDIS_CACHED
|
| 20 |
+
|
| 21 |
+
class CryptoCompareNewsSortOrder(Enum):
    """Sort orders accepted by the CryptoCompare news endpoint."""

    LATEST = "latest"
    POPULAR = "popular"

    def __str__(self) -> str:
        # Render as the raw API value so members drop straight into query params.
        return str(self.value)
|
| 27 |
+
|
| 28 |
+
class CryptoCompareLanguages(Enum):
    """News-article languages supported by the CryptoCompare API.

    Member names are human-readable; values are the API's language codes.
    """

    English = "EN"
    French = "FR"
    Espanol = "ES"
    Turkish = "TR"
    Portuguese = "PT"

    def __str__(self) -> str:
        # Render as the raw API code so members drop straight into query params.
        return str(self.value)
|
| 37 |
+
|
| 38 |
+
class CryptoCompare:
    """Thin client for the CryptoCompare REST APIs.

    Wraps the "min-api" endpoints (coin list, prices, news, exchanges,
    wallets) and the asset-data endpoint. Most methods cache their responses
    in Redis via the module-level ``redis_cache`` decorator; each accepts an
    optional ``cache_ttl`` keyword that the decorator consumes to override
    the default TTL.
    """

    def __init__(self, base_url: str = None, asset_data_base_url: str = None) -> None:
        """Store the API base URLs, falling back to module constants."""
        self.CRYPTO_COMPARE_API_BASE_URL = base_url or CRYPTO_COMPARE_API_BASE_URL
        self.CRYPTO_COMPARE_ASSET_DATA_API_BASE_URL = (
            asset_data_base_url or CRYPTO_COMPARE_ASSET_DATA_API_BASE_URL
        )

    def _request_json(self, url: str, params: dict) -> dict:
        """Issue a GET request against `url` and return the decoded JSON body.

        Centralizes the request/logging/error-handling boilerplate that was
        previously duplicated in every public method. Returns ``None`` on any
        ``httpx.HTTPError`` (connection failure or non-2xx status), matching
        the original per-method behavior.
        """
        logger.debug(f"Query URL: {url}")
        logger.debug(f"Query params: {params}")

        headers = {"Content-type": "application/json; charset=UTF-8"}
        logger.debug(f"Query headers {headers}")

        try:
            with httpx.Client(timeout=30.0) as client:
                response = client.get(url, params=params, headers=headers)
                response.raise_for_status()  # Raise if the request was unsuccessful
                return response.json()
        except httpx.HTTPError as e:
            logger.debug(f"An error occurred while making the request: {e}")
            return None

    # @redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
    @logger.instrument()
    def get_all_coins(self) -> dict:
        """Return the full CryptoCompare coin list.

        Returns:
            dict: The API payload with keys converted to snake_case via
            ``convert_to_snakecase``, or ``None`` if the request failed.
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/all/coinlist"
        params = {"api_key": os.getenv("CRYPTOCOMPARE_API_KEY")}

        result = self._request_json(url, params)
        # Preserve original behavior: snake_case the payload; propagate None.
        return convert_to_snakecase(result) if result is not None else None

    @redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
    @logger.instrument()
    def get_coin_price(self, ids: list[str], vs_currencies: list[str], cache_ttl: int = None) -> dict:
        """Fetch current prices for `ids` quoted in `vs_currencies`.

        Args:
            ids (list[str]): Coin symbols (CryptoCompare ``fsyms``).
            vs_currencies (list[str]): Quote currencies (``tsyms``).
            cache_ttl (int, optional): Redis cache TTL override in seconds.

        Returns:
            dict: Price matrix keyed by symbol, or ``None`` on request failure.
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/pricemulti"
        params = {
            "api_key": os.getenv("CRYPTOCOMPARE_API_KEY"),
            "fsyms": ",".join(ids),
            "tsyms": ",".join(vs_currencies),
        }
        return self._request_json(url, params)

    @redis_cache(ttl=ONE_QUARTER_IN_SECONDS)
    @logger.instrument()
    def get_overall_coin_data(self, symbol: str, cache_ttl: int = None) -> dict:
        """Fetch asset-level data for one coin symbol.

        Args:
            symbol (str): The asset symbol (e.g. ``'BTC'``).
            cache_ttl (int, optional): Redis cache TTL override in seconds.

        Returns:
            dict: The asset-data payload, or ``None`` on request failure.
        """
        url = f"{self.CRYPTO_COMPARE_ASSET_DATA_API_BASE_URL}data/by/symbol"
        params = {
            "api_key": os.getenv("CRYPTOCOMPARE_API_KEY"),
            "asset_symbol": symbol,
        }
        return self._request_json(url, params)

    @redis_cache(ttl=ONE_QUARTER_IN_SECONDS)
    @logger.instrument()
    def get_news_categories(self, cache_ttl: int = None) -> dict:
        """Retrieve the list of news categories.

        Args:
            cache_ttl (int, optional): Redis cache TTL override in seconds.

        Returns:
            dict: News categories payload, or ``None`` on request failure.
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/news/categories"
        params = {"api_key": os.getenv("CRYPTOCOMPARE_API_KEY")}
        return self._request_json(url, params)

    @redis_cache(ttl=ONE_HOUR_IN_SECONDS)
    @logger.instrument()
    def get_latest_news_articles(
        self,
        language: CryptoCompareLanguages = CryptoCompareLanguages.English,
        sort_order: CryptoCompareNewsSortOrder = CryptoCompareNewsSortOrder.LATEST,
        cache_ttl: int = None
    ) -> dict:
        """Retrieve the latest news articles.

        Args:
            language (CryptoCompareLanguages, optional): Article language.
                Defaults to English.
            sort_order (CryptoCompareNewsSortOrder, optional): Sort order.
                Defaults to latest-first.
            cache_ttl (int, optional): Redis cache TTL override in seconds.

        Returns:
            dict: News payload, or ``None`` on request failure.

        Example:
            latest = cc.get_latest_news_articles(
                language=CryptoCompareLanguages.English,
                sort_order=CryptoCompareNewsSortOrder.POPULAR,
            )
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/v2/news/"
        # Bug fix: httpx only accepts primitive query-param values (str/int/
        # float/bool/None); passing Enum members raises TypeError. Both enums
        # define __str__ to return the API value, so stringify them here.
        params = {
            "api_key": os.getenv("CRYPTOCOMPARE_API_KEY"),
            "lang": str(language),
            "sortOrder": str(sort_order),
        }
        return self._request_json(url, params)

    @redis_cache(ttl=ONE_MONTH_IN_SECONDS)
    @logger.instrument()
    def get_all_exchanges_general_info(self, cache_ttl: int = None) -> dict:
        """Retrieve general information about all supported exchanges.

        Args:
            cache_ttl (int, optional): Redis cache TTL override in seconds.

        Returns:
            dict: Exchanges payload, or ``None`` on request failure.
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/exchanges/general"
        params = {"api_key": os.getenv("CRYPTOCOMPARE_API_KEY")}
        return self._request_json(url, params)

    @redis_cache(ttl=ONE_MONTH_IN_SECONDS)
    @logger.instrument()
    def get_all_wallets_general_info(self, cache_ttl: int = None) -> dict:
        """Retrieve general information about all supported wallets.

        Args:
            cache_ttl (int, optional): Redis cache TTL override in seconds.

        Returns:
            dict: Wallets payload, or ``None`` on request failure.
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/wallets/general"
        params = {"api_key": os.getenv("CRYPTOCOMPARE_API_KEY")}
        return self._request_json(url, params)

    # @redis_cache(ttl=ONE_MONTH_IN_SECONDS)
    @logger.instrument()
    def get_top_tier_exchanges_list(
        self,
        top_tier: bool = False,
        cache_ttl: int = None
    ) -> dict:
        """Retrieve the list of exchanges, optionally restricted to top tier.

        Args:
            top_tier (bool, optional): When True, only top-tier exchanges are
                returned. Defaults to False.
            cache_ttl (int, optional): Redis cache TTL override in seconds
                (currently unused — the cache decorator is disabled above).

        Returns:
            dict: Exchanges payload, or ``None`` on request failure.
        """
        url = f"{self.CRYPTO_COMPARE_API_BASE_URL}data/v4/all/exchanges"
        params = {
            "api_key": os.getenv("CRYPTOCOMPARE_API_KEY"),
            # The API expects a lowercase string boolean.
            "topTier": 'true' if top_tier else 'false',
        }
        return self._request_json(url, params)
|
| 325 |
+
|
src/data_sources/dexscreener.py
ADDED
|
@@ -0,0 +1,106 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dotenv import load_dotenv
|
| 2 |
+
|
| 3 |
+
import httpx
|
| 4 |
+
from typing import List, Union
|
| 5 |
+
from src.databases.redis import REDIS_CACHED
|
| 6 |
+
from src.libs.constants import ONE_HOUR_IN_SECONDS, ONE_MINUTE_IN_SECONDS
|
| 7 |
+
from src.libs.constants import DEX_SCREENER_BASE_URL
|
| 8 |
+
|
| 9 |
+
load_dotenv()
|
| 10 |
+
|
| 11 |
+
# Short alias for the shared Redis caching decorator used throughout this module.
redis_cache = REDIS_CACHED
|
| 12 |
+
|
| 13 |
+
class DexScreener:
    """Client for the public Dex Screener REST API.

    Attributes:
        DEX_SCREENER_BASE_URL (str): The base URL for Dex Screener requests.

    Methods:
        get_pairs: Fetch pair data by chain id and pair address(es).
        get_tokens: Fetch token data by token address(es).
        search: Search pairs by address, token name, or token symbol.
    """

    def __init__(self, base_url: str = None) -> None:
        """Store the base URL, falling back to the module constant."""
        self.DEX_SCREENER_BASE_URL = base_url or DEX_SCREENER_BASE_URL

    def _fetch(self, url: str) -> dict:
        """GET `url` and return the decoded JSON body.

        Centralizes the request/error boilerplate that was duplicated in
        every public method. Returns ``None`` on any ``httpx.HTTPError``,
        matching the original behavior.
        """
        try:
            # Explicit timeout for consistency with the other data-source
            # clients in this package (httpx's implicit default is only 5s).
            response = httpx.get(url, timeout=30.0)
            response.raise_for_status()  # Raise if the request was unsuccessful
            return response.json()
        except httpx.HTTPError as e:
            print(f"An error occurred while making the request: {e}")
            return None

    @redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
    def get_pairs(self, chain_id: str, pair_addresses: str) -> dict:
        """Fetch pair data for `pair_addresses` on chain `chain_id`.

        Parameters:
            chain_id (str): The ID of the blockchain network.
            pair_addresses (str): The address(es) of the pair(s).

        Returns:
            dict: JSON response from the API, or ``None`` on error.
        """
        return self._fetch(f"{self.DEX_SCREENER_BASE_URL}pairs/{chain_id}/{pair_addresses}")

    @redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
    def get_tokens(self, token_addresses: Union[str, List[str]]) -> dict:
        """Fetch token data for one or more token addresses.

        Parameters:
            token_addresses (Union[str, List[str]]): A single address or a
                list of addresses; a list is joined with commas per the API.

        Returns:
            dict: JSON response from the API, or ``None`` on error.
        """
        if isinstance(token_addresses, list):
            token_addresses = ','.join(token_addresses)

        return self._fetch(f"{self.DEX_SCREENER_BASE_URL}tokens/{token_addresses}")

    @redis_cache(ttl=ONE_HOUR_IN_SECONDS)
    def search(self, query: str) -> dict:
        """Search for pairs matching `query`.

        The query may include a pair address, token address, token name, or
        token symbol.

        Parameters:
            query (str): The search query.

        Returns:
            dict: JSON response from the API, or ``None`` on error.
        """
        return self._fetch(f"{self.DEX_SCREENER_BASE_URL}search/?q={query}")
|
src/databases/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
src/databases/postgres.py
ADDED
|
@@ -0,0 +1,4 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from sqlalchemy import create_engine
|
| 3 |
+
|
| 4 |
+
# Module-level SQLAlchemy engine shared by the application; created eagerly
# at import time.
# NOTE(review): os.getenv returns None when POSTGRES_CONNECTION_STRING is
# unset (no load_dotenv() here, unlike sibling modules), which would make
# create_engine raise at import — confirm the environment is always populated
# before this module is imported.
sqlalchemy_engine = create_engine(url=os.getenv('POSTGRES_CONNECTION_STRING'))
|
src/databases/redis.py
ADDED
|
@@ -0,0 +1,132 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
import json
|
| 3 |
+
from dotenv import load_dotenv
|
| 4 |
+
from upstash_redis import Redis
|
| 5 |
+
from src.libs.logger import logger
|
| 6 |
+
from src.libs.helper_functions import chunk_data, create_uuid_from_string
|
| 7 |
+
load_dotenv()
|
| 8 |
+
|
| 9 |
+
# Upstash Redis REST credentials, read from the environment (.env is loaded
# above via load_dotenv()).
url = os.getenv("UPSTASH_REDIS_REST_URL")
token = os.getenv("UPSTASH_REDIS_REST_TOKEN")

# Module-level client and pipeline shared by the caching decorator below.
# NOTE(review): if either env var is missing these are None — confirm the
# deployment always provides both before any cache call is made.
REDIS = Redis(url=url, token=token)
REDIS_PIPELINE = REDIS.pipeline()
|
| 14 |
+
|
| 15 |
+
|
| 16 |
+
@logger.instrument()
def args_to_key(*args, **kwargs):
    """Build a deterministic cache-key string from call arguments.

    Callable positional arguments contribute their ``__name__`` first (so a
    wrapped function's identity leads the key); every remaining positional
    argument and every keyword value contributes its ``str()`` form. The
    pieces are joined with ``'_'``.

    Args:
        *args: Positional arguments (callables and/or plain values).
        **kwargs: Keyword arguments; only the values participate in the key.

    Returns:
        str: The joined key string.

    Example:
        >>> args_to_key("hello", "world", a=1, b=2)
        'hello_world_a_1_b_2'
    """
    logger.debug(f"args: {args}")
    logger.debug(f"kwargs: {kwargs}")

    params = []

    # Callable arguments first: identify them by name.
    for arg in args:
        if callable(arg):
            params.append(arg.__name__)

    # Then non-callable arguments as plain strings.
    # Bug fix: the original called cast_args_to_string_and_return_first_index,
    # which is never imported in this module — a NameError at runtime for any
    # non-callable positional argument. str() matches the documented example.
    for arg in args:
        if not callable(arg):
            params.append(str(arg))

    # Finally keyword values (insertion order; key names are ignored).
    for kwarg in kwargs.values():
        params.append(str(kwarg))

    return "_".join(params)
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
@logger.instrument()
def REDIS_CACHED(ttl: int = 3600, chunk: bool = False):
    """
    Decorator that caches the result of a function call in Redis.

    On a cache hit the stored JSON is decoded and returned; on a miss the
    wrapped function runs and its result is stored with a TTL.

    Args:
        ttl (int): Default time-to-live (seconds) for the cached result.
            Can be overridden per call with a ``cache_ttl`` or ``ttl`` kwarg.
        chunk (bool): If True, ``result.data`` is split into chunks of 100
            items and stored as a Redis list (one JSON string per chunk).

    Returns:
        A decorator wrapping the target function with Redis caching.

    Example:
        >>> @REDIS_CACHED(ttl=60)
        ... def example_function(arg1, arg2):
        ...     return arg1 + arg2
    """
    def decorator(func):
        def wrapper(*args, **kwargs):
            r = REDIS

            # Derive a stable UUID cache key from the function + arguments.
            cache_key = args_to_key(func, *args, **kwargs)
            logger.debug(f"Cache key: {cache_key}")
            cache_key = str(create_uuid_from_string(cache_key))
            logger.debug(f"Cache key: {cache_key}")

            if chunk:
                # BUGFIX: chunked results are stored as a Redis *list*, so
                # they must be read back with lrange — r.get() on a list key
                # can never hit, which made chunked caching a no-op.
                cached_chunks = r.lrange(cache_key, 0, -1)
                if cached_chunks:
                    return [json.loads(piece) for piece in cached_chunks]
            else:
                cached = r.get(cache_key)
                if cached:
                    # Found in cache, return it.
                    return json.loads(cached)

            # Cache miss: call the downstream function.
            result = func(*args, **kwargs)

            # Per-call TTL override, falling back to the decorator default.
            cache_ttl = kwargs.get('cache_ttl')
            ttl_seconds = cache_ttl or kwargs.get('ttl', ttl)

            if chunk:
                chunked_result = chunk_data(result.data, 100)
                for piece in chunked_result:
                    r.rpush(cache_key, json.dumps(piece))
                # BUGFIX: pexpire takes milliseconds; expire takes seconds,
                # matching ttl_seconds.
                r.expire(cache_key, ttl_seconds)
            else:
                # BUGFIX: serialize before storing so the json.loads on the
                # read path is guaranteed to succeed (the original stored the
                # raw object).
                r.setex(cache_key, ttl_seconds, json.dumps(result))

            # Return the fresh result transparently.
            return result
        return wrapper
    return decorator
|
| 113 |
+
|
| 114 |
+
|
| 115 |
+
@logger.instrument()
def cast_args_to_string_and_return_first_index(args):
    """
    Normalise a value for use in a cache key.

    The value is cast to a string, surrounding '<' and '>' are stripped,
    the result is split on spaces, and the first token is returned.

    Args:
        args (any): The value to be processed.

    Returns:
        str or None: The first token after processing, or None when the
        split yields no elements.

    Example:
        >>> cast_args_to_string_and_return_first_index("hello <world>")
        'hello'
    """
    tokens = str(args).strip('<>').split(' ')
    if not tokens:
        return None
    return tokens[0]
|
| 132 |
+
|
src/knowledge_bases/.DS_Store
ADDED
|
Binary file (6.15 kB). View file
|
|
|
src/knowledge_bases/combined.py
ADDED
|
@@ -0,0 +1,60 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from phi.knowledge.combined import CombinedKnowledgeBase
|
| 2 |
+
from phi.vectordb.pgvector import PgVector2
|
| 3 |
+
from src.knowledge_bases.json import json_knowledge_base
|
| 4 |
+
from src.knowledge_bases.pdf import pdf_knowledge_base
|
| 5 |
+
from phi.knowledge.pdf import PDFUrlKnowledgeBase
|
| 6 |
+
from phi.knowledge.website import WebsiteKnowledgeBase
|
| 7 |
+
from src.databases.postgres import sqlalchemy_engine
|
| 8 |
+
from src.libs.helper_functions import get_pdf_urls, get_pdf_urls_with_root_url
|
| 9 |
+
from src.libs.google_drive import get_drive_file_urls, get_pdf_urls_with_root_url
|
| 10 |
+
from phi.embedder.ollama import OllamaEmbedder
|
| 11 |
+
|
| 12 |
+
# Configuration for Google Drive API
|
| 13 |
+
# service_account_file = 'src/config/telzho-430515-c2622883c30e.json'
|
| 14 |
+
# folder_id = '1veFNWFu68qO9dDPxouLSdB2pHGunosar'
|
| 15 |
+
|
| 16 |
+
# Get file URLs from Google Drive folder
|
| 17 |
+
# pdf_urls = get_drive_file_urls(service_account_file, folder_id)
|
| 18 |
+
|
| 19 |
+
# pdf_urls = get_pdf_urls_with_root_url(
|
| 20 |
+
# "https://swiftlynxtechnologies.com/Rhapsody/",
|
| 21 |
+
# "https://swiftlynxtechnologies.com/Rhapsody"
|
| 22 |
+
# )
|
| 23 |
+
# print(pdf_urls)
|
| 24 |
+
|
| 25 |
+
# Collect PDF URLs from the remote directory listing at import time.
# NOTE(review): two functions named get_pdf_urls_with_root_url are imported
# above (helper_functions takes two args, google_drive takes one); the
# google_drive import comes last and wins, matching this one-argument call.
pdf_urls = get_pdf_urls_with_root_url("https://swiftlynxtechnologies.com/Rhapsody/")
print(pdf_urls)  # NOTE(review): debug output at import time — confirm intended

# Combined knowledge base aggregating the enabled sources into a single
# pgvector collection, embedded with Ollama.
knowledge_base = CombinedKnowledgeBase(
    sources=[
        json_knowledge_base,
        # pdf_knowledge_base
        # PDFUrlKnowledgeBase(
        #     urls=pdf_urls,
        #     # Table name: pastor_chris_library
        #     vector_db=PgVector2(
        #         collection="pastor_chris_library",
        #         db_engine=sqlalchemy_engine,
        #         embedder=OllamaEmbedder()
        #     ),
        # ),
        # WebsiteKnowledgeBase(
        #     urls=[''],
        #     # Number of links to follow from the seed URLs
        #     max_links=10,
        #     # Table name: pastor_chris_library
        #     vector_db=PgVector2(
        #         collection="pastor_chris_library",
        #         db_engine=sqlalchemy_engine,
        #     ),
        # ),
        # website_knowledge_base,
        # local_pdf_knowledge_base,
    ],
    vector_db=PgVector2(
        # Table name: ai.combined_documents
        collection="combined_documents",
        db_engine=sqlalchemy_engine,
        embedder=OllamaEmbedder()
    ),
)
|
src/knowledge_bases/json.py
ADDED
|
@@ -0,0 +1,63 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from fs_s3fs import S3FS
|
| 3 |
+
from src.libs.logger import logger
|
| 4 |
+
from src.libs.s3fs import get_s3_credentials
|
| 5 |
+
from phi.vectordb.pgvector import PgVector2
|
| 6 |
+
from phi.knowledge.json import JSONKnowledgeBase
|
| 7 |
+
from src.databases.postgres import sqlalchemy_engine
|
| 8 |
+
from phi.embedder.ollama import OllamaEmbedder
|
| 9 |
+
|
| 10 |
+
class JSONKnowledgeBaseExtended(JSONKnowledgeBase):
    """
    JSONKnowledgeBase backed by an S3 bucket.

    Adds an S3 filesystem handle so raw JSON payloads can be written into
    the bucket (under /json-data/) before being loaded into the vector store.
    """

    s3fs: S3FS = None  # S3 filesystem handle, created in __init__

    def __init__(
        self,
        s3_bucket_name,
        vector_db,
        s3_access_key_id,
        s3_secret_access_key,
        s3_endpoint_url,
        s3_region,
    ):
        super().__init__(path=s3_bucket_name, vector_db=vector_db, bucket_name=s3_bucket_name)

        # Initialize the S3 filesystem used for raw JSON storage.
        self.s3fs = S3FS(
            bucket_name=s3_bucket_name,
            aws_access_key_id=s3_access_key_id,
            aws_secret_access_key=s3_secret_access_key,
            endpoint_url=s3_endpoint_url,
            region=s3_region,
        )

    def load_knowledge_base(self, recreate: bool = False):
        """
        Load this knowledge base into the vector store.

        BUGFIX: operate on *self* instead of the module-level singleton so
        the method behaves correctly on any instance.
        """
        self.load(recreate=recreate)

    def store_json_data_in_s3(self, json_data, file_path):
        """
        Serialize ``json_data`` and write it under /json-data/ in the bucket.

        Args:
            json_data: Any JSON-serializable object.
            file_path (str): Destination name, with or without a leading '/'.

        Returns:
            bool: True once the write completes.
        """
        # Normalise to a single "/json-data/<name>" path (the original
        # produced a double slash "//json-data/..." in the open() call).
        file_path = f"/json-data/{file_path.lstrip('/')}"

        logger.info(f"Storing JSON data in S3 bucket: {self.s3fs._bucket_name} at path: {file_path}")

        # BUGFIX: use a context manager so the S3 file handle is always
        # closed (the original leaked the handle returned by open()).
        with self.s3fs.open(path=file_path, mode='w') as remote_file:
            remote_file.write(json.dumps(json_data, indent=2))
        return True
|
| 47 |
+
|
| 48 |
+
|
| 49 |
+
# S3 credentials
# NOTE(review): variable name contains a typo ("credendtials"); it is
# private to this module, so it is kept unchanged here.
_s3_credendtials = get_s3_credentials()

# Constructor arguments for the extended JSON knowledge base: a pgvector
# collection plus the S3 credentials unpacked from above (presumably
# bucket name, key id/secret, endpoint and region — confirm against
# get_s3_credentials()).
_json_knowledge_base_arguments = {
    "vector_db": PgVector2(
        collection="json_documents",
        db_engine=sqlalchemy_engine,
        embedder=OllamaEmbedder()
    ),
    **_s3_credendtials
}

# Initialize the extended JSONKnowledgeBase with the S3 bucket name and S3 credentials
json_knowledge_base = JSONKnowledgeBaseExtended(
    **_json_knowledge_base_arguments
)
|
src/knowledge_bases/pdf.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from fs_s3fs import S3FS
|
| 3 |
+
from src.libs.logger import logger
|
| 4 |
+
from src.libs.s3fs import get_s3_credentials
|
| 5 |
+
from phi.vectordb.pgvector import PgVector2
|
| 6 |
+
from phi.knowledge.json import JSONKnowledgeBase
|
| 7 |
+
from phi.knowledge.pdf import PDFUrlKnowledgeBase
|
| 8 |
+
from src.databases.postgres import sqlalchemy_engine
|
| 9 |
+
from phi.embedder.ollama import OllamaEmbedder
|
| 10 |
+
|
| 11 |
+
|
| 12 |
+
class PDFUrlKnowledgeBaseExtended(PDFUrlKnowledgeBase):
    """
    PDFUrlKnowledgeBase backed by an S3 bucket.

    Adds an S3 filesystem handle alongside the standard PDF-URL ingestion.
    """

    s3fs: S3FS = None  # S3 filesystem handle, created in __init__

    def __init__(
        self,
        s3_bucket_name,
        vector_db,
        s3_access_key_id,
        s3_secret_access_key,
        s3_endpoint_url,
        s3_region,
    ):
        super().__init__(path=s3_bucket_name, vector_db=vector_db, bucket_name=s3_bucket_name)

        # Initialize the S3 filesystem.
        self.s3fs = S3FS(
            bucket_name=s3_bucket_name,
            aws_access_key_id=s3_access_key_id,
            aws_secret_access_key=s3_secret_access_key,
            endpoint_url=s3_endpoint_url,
            region=s3_region,
        )

    def load_knowledge_base(self, recreate: bool = False):
        """
        Load this knowledge base into the vector store.

        BUGFIX: operate on *self* instead of the module-level singleton so
        the method works on any instance.
        """
        self.load(recreate=recreate)

    def chunk_and_store_to_vector_db(self, urls):
        """
        Point this knowledge base at a new set of PDF URLs.

        BUGFIX: assign to *self* rather than the module-level singleton.
        """
        self.urls = urls
|
| 40 |
+
|
| 41 |
+
|
| 42 |
+
# S3 credentials
# NOTE(review): variable name contains a typo ("credendtials"); private to
# this module, kept unchanged here.
_s3_credendtials = get_s3_credentials()

# Constructor arguments for the extended PDF knowledge base: a pgvector
# collection plus the unpacked S3 credentials.
_pdf_knowledge_base_arguments = {
    "vector_db": PgVector2(
        collection="pdf_documents",
        db_engine=sqlalchemy_engine,
        embedder=OllamaEmbedder()

    ),
    **_s3_credendtials
}

# Initialize the extended PDFKnowledgeBase with the S3 bucket name and S3 credentials
pdf_knowledge_base = PDFUrlKnowledgeBaseExtended(
    **_pdf_knowledge_base_arguments
)
|
src/knowledge_bases/pipeline.py
ADDED
|
@@ -0,0 +1,48 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from chainlit import make_async
|
| 2 |
+
from src.data_sources.cryptocompare import CryptoCompare
|
| 3 |
+
from src.data_sources.coin_gecko import CoinGecko
|
| 4 |
+
from src.knowledge_bases.json import json_knowledge_base
|
| 5 |
+
|
| 6 |
+
coin_gecko = CoinGecko()
|
| 7 |
+
crypto_compare = CryptoCompare()
|
| 8 |
+
|
| 9 |
+
store_json_data_in_s3 = make_async(json_knowledge_base.store_json_data_in_s3)
|
| 10 |
+
|
| 11 |
+
async def fetch_and_load_crypto_compare_json_data():
    """
    Fetch reference datasets from CryptoCompare and persist each one to S3
    as a JSON document.

    The blocking client calls are wrapped with chainlit's make_async so they
    run off the event loop thread. All datasets are fetched first, then all
    are stored (matching the original ordering of side effects).
    """
    # (target file name, blocking fetcher) pairs.
    fetch_plan = [
        ("crypto_compare_all_coins.json", crypto_compare.get_all_coins),
        ("crypto_compare_news_categories.json", crypto_compare.get_news_categories),
        ("crypto_compare_top_tier_exchanges_list.json", crypto_compare.get_top_tier_exchanges_list),
        ("crypto_compare_all_wallets_general_info.json", crypto_compare.get_all_wallets_general_info),
        ("crypto_compare_all_exchanges_general_info.json", crypto_compare.get_all_exchanges_general_info),
    ]

    # Fetch everything first...
    fetched = {}
    for file_name, fetcher in fetch_plan:
        fetched[file_name] = await make_async(fetcher)()

    # ...then persist everything.
    for file_name, payload in fetched.items():
        await store_json_data_in_s3(payload, file_name)
|
| 29 |
+
|
| 30 |
+
async def fetch_and_load_coin_gecko_json_data():
    """
    Fetch reference datasets from CoinGecko and persist them to S3 as JSON.
    """
    # BUGFIX: name the wrapper after what it actually fetches — the original
    # bound coin_gecko.get_exchanges_list to a local called `get_all_coins`.
    get_exchanges_list = make_async(coin_gecko.get_exchanges_list)
    get_asset_platforms_list = make_async(coin_gecko.get_asset_platforms_list)

    all_exchange_data = await get_exchanges_list()
    asset_platforms_list = await get_asset_platforms_list()

    # CONSISTENCY FIX: every other stored document carries a .json extension;
    # these two were stored without one.
    await store_json_data_in_s3(all_exchange_data, "coin_gecko_all_exchange_data.json")
    await store_json_data_in_s3(asset_platforms_list, "coin_gecko_asset_platforms_list.json")
|
| 39 |
+
|
| 40 |
+
async def load_json_data_into_knowledge_base():
    """
    Rebuild the JSON knowledge base from the documents stored in S3.

    The blocking load is wrapped with make_async and run with recreate=True,
    dropping and repopulating the vector collection.
    """
    load_knowledge_base = make_async(json_knowledge_base.load_knowledge_base)
    await load_knowledge_base(recreate=True)
|
| 44 |
+
|
| 45 |
+
async def main():
    """
    Full ingestion pipeline: fetch CoinGecko and CryptoCompare reference
    data into S3, then (re)load the JSON knowledge base from it.
    """
    await fetch_and_load_coin_gecko_json_data()
    await fetch_and_load_crypto_compare_json_data()
    await load_json_data_into_knowledge_base()
|
src/knowledge_bases/store_data.py
ADDED
|
@@ -0,0 +1,10 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from src.data_sources.cryptocompare import CryptoCompare
|
| 2 |
+
from src.libs.helper_functions import store_json_data
|
| 3 |
+
from src.libs.logger import logger
|
| 4 |
+
|
| 5 |
+
ccp = CryptoCompare()
|
| 6 |
+
|
| 7 |
+
def store_coin_data(id: str):
    """
    Fetch overall coin data for a symbol from CryptoCompare and write it to
    data/coin_data/<id>.json.

    Args:
        id (str): The coin symbol. NOTE(review): shadows the builtin `id`;
            kept for backward compatibility with keyword callers.
    """
    result = ccp.get_overall_coin_data(symbol=id)
    store_json_data(result, f"data/coin_data/{id}.json")
    logger.debug(f"Data stored for {id}")
|
src/libs/constants.py
ADDED
|
@@ -0,0 +1,70 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
"""
|
| 2 |
+
Constants representing the number of seconds in a quarter.
|
| 3 |
+
"""
|
| 4 |
+
ONE_QUARTER_IN_SECONDS : int = (86400 * 30) * 3
|
| 5 |
+
|
| 6 |
+
"""
|
| 7 |
+
Constants representing the number of seconds in a month.
|
| 8 |
+
"""
|
| 9 |
+
ONE_MONTH_IN_SECONDS : int = 86400 * 30
|
| 10 |
+
|
| 11 |
+
"""
|
| 12 |
+
Constants representing the number of seconds in a week.
|
| 13 |
+
"""
|
| 14 |
+
ONE_WEEK_IN_SECONDS : int = 604800
|
| 15 |
+
|
| 16 |
+
"""
|
| 17 |
+
Constants representing the number of seconds in a day.
|
| 18 |
+
"""
|
| 19 |
+
ONE_DAY_IN_SECONDS : int = 86400
|
| 20 |
+
|
| 21 |
+
"""
|
| 22 |
+
Constant representing the number of seconds in one hour.
|
| 23 |
+
"""
|
| 24 |
+
ONE_HOUR_IN_SECONDS : int = 3600
|
| 25 |
+
|
| 26 |
+
"""
|
| 27 |
+
Constant representing the number of seconds in one minute.
|
| 28 |
+
"""
|
| 29 |
+
ONE_MINUTE_IN_SECONDS : int = 60
|
| 30 |
+
|
| 31 |
+
"""
|
| 32 |
+
This constant is used to represent the base URL for DexScreener API.
|
| 33 |
+
|
| 34 |
+
DEX_SCREENER_BASE_URL : str
|
| 35 |
+
"""
|
| 36 |
+
DEX_SCREENER_BASE_URL : str = "https://api.dexscreener.com/latest/dex/"
|
| 37 |
+
|
| 38 |
+
"""
|
| 39 |
+
This constant is used to represent the base endpoint for Jina Search API.
|
| 40 |
+
|
| 41 |
+
JINA_SEARCH_BASE_ENDPOINT : str
|
| 42 |
+
"""
|
| 43 |
+
JINA_SEARCH_BASE_ENDPOINT : str = "https://s.jina.ai/"
|
| 44 |
+
|
| 45 |
+
"""
|
| 46 |
+
This constant is used to represent the base endpoint for Jina Reader API.
|
| 47 |
+
|
| 48 |
+
JINA_READER_BASE_ENDPOINT : str
|
| 49 |
+
"""
|
| 50 |
+
JINA_READER_BASE_ENDPOINT : str = "https://r.jina.ai/"
|
| 51 |
+
|
| 52 |
+
"""
|
| 53 |
+
This constant is used to represent the base URL for CryptoCompare API.
|
| 54 |
+
|
| 55 |
+
CRYPTO_COMPARE_BASE_URL: str
|
| 56 |
+
"""
|
| 57 |
+
CRYPTO_COMPARE_API_BASE_URL: str = "https://min-api.cryptocompare.com/"
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
"""
|
| 61 |
+
This constant is used to represent the base URL for CryptoCompare asset data API.
|
| 62 |
+
|
| 63 |
+
CRYPTO_COMPARE_ASSET_DATA_API_BASE_URL: str
|
| 64 |
+
"""
|
| 65 |
+
CRYPTO_COMPARE_ASSET_DATA_API_BASE_URL : str = "https://data-api.cryptocompare.com/asset/v1/"
|
| 66 |
+
|
| 67 |
+
|
| 68 |
+
# TTLs
|
| 69 |
+
|
| 70 |
+
SEARCH_DATA_TTL : int = ONE_WEEK_IN_SECONDS
|
src/libs/google_drive.py
ADDED
|
@@ -0,0 +1,54 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import json
|
| 2 |
+
from google.oauth2 import service_account
|
| 3 |
+
from googleapiclient.discovery import build
|
| 4 |
+
import requests
|
| 5 |
+
from bs4 import BeautifulSoup
|
| 6 |
+
|
| 7 |
+
|
| 8 |
+
def get_pdf_urls_with_root_url(root_url):
    """
    Return the URLs of PDF files linked from a remote directory listing.

    Parameters:
        root_url (str): The root URL of the directory listing page.

    Returns:
        list: Absolute URLs of the .pdf links found on the page; empty when
        the page is unreachable or returns a non-200 status.
    """
    from urllib.parse import urljoin  # stdlib; used only by this function

    pdf_urls = []

    # Fetch the directory listing page.
    response = requests.get(root_url)

    if response.status_code == 200:
        # Parse the HTML content.
        soup = BeautifulSoup(response.content, 'html.parser')

        # Ensure the base ends with '/' so relative links resolve under the
        # listing directory (urljoin drops the last path segment otherwise).
        base = root_url if root_url.endswith('/') else root_url + '/'

        for link in soup.find_all('a'):
            href = link.get('href')
            if href and href.lower().endswith('.pdf'):
                # BUGFIX: urljoin resolves relative, root-relative ("/x.pdf")
                # and fully qualified hrefs correctly; the original naive
                # string concatenation mangled absolute links.
                pdf_urls.append(urljoin(base, href))
    else:
        print(f"Failed to access {root_url}, status code: {response.status_code}")

    return pdf_urls
|
| 38 |
+
|
| 39 |
+
|
| 40 |
+
def get_drive_file_urls(service_account_file, folder_id):
    """
    Return the webViewLink URLs of the files inside a Google Drive folder.

    Parameters:
        service_account_file (str): Path to a service-account JSON key file.
        folder_id (str): ID of the Drive folder to list.

    Returns:
        list: webViewLink URLs of the files found.
        NOTE(review): only the first page of results is read — no
        nextPageToken handling; confirm folder sizes stay within one page.
    """
    # Read-only Drive credentials from the service-account key.
    credentials = service_account.Credentials.from_service_account_file(
        service_account_file,
        scopes=['https://www.googleapis.com/auth/drive.readonly'],
    )

    # Build the Drive v3 client.
    service = build('drive', 'v3', credentials=credentials)

    # List files whose parent is the given folder, fetching only the fields
    # needed to build the links.
    query = f"'{folder_id}' in parents"
    results = service.files().list(q=query, fields="files(id, webViewLink)").execute()
    items = results.get('files', [])

    file_urls = [item['webViewLink'] for item in items]

    return file_urls
|
src/libs/helper_functions.py
ADDED
|
@@ -0,0 +1,215 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from ulid import ULID
|
| 2 |
+
from typing import Dict
|
| 3 |
+
import os, re, uuid, hashlib
|
| 4 |
+
from dotenv import load_dotenv
|
| 5 |
+
|
| 6 |
+
load_dotenv()
|
| 7 |
+
|
| 8 |
+
def get_pdf_urls(folder_path):
    """
    Return the paths of the PDF files found directly inside a folder.

    Parameters:
        folder_path (str): The path to the folder.

    Returns:
        list: Paths (folder_path joined with each file name) of the PDFs,
        in os.listdir order.
    """
    # Case-insensitive extension check, one path per matching entry.
    return [
        os.path.join(folder_path, entry)
        for entry in os.listdir(folder_path)
        if entry.lower().endswith('.pdf')
    ]
|
| 29 |
+
|
| 30 |
+
|
| 31 |
+
def get_pdf_urls_with_root_url(folder_path, root_url):
    """
    Return URLs for the PDF files found directly inside a local folder,
    built by joining each file name onto ``root_url``.

    Parameters:
        folder_path (str): The local folder to scan.
        root_url (str): The base used to construct each file URL.

    Returns:
        list: One URL per PDF file found, in os.listdir order.
    """
    urls = []
    for entry in os.listdir(folder_path):
        # Skip anything that is not a PDF (case-insensitive).
        if not entry.lower().endswith('.pdf'):
            continue
        urls.append(os.path.join(root_url, entry))
    return urls
|
| 53 |
+
|
| 54 |
+
|
| 55 |
+
# Example usage:
|
| 56 |
+
# folder_path = "/path/to/your/folder"
|
| 57 |
+
# print(get_pdf_urls(folder_path))
|
| 58 |
+
|
| 59 |
+
|
| 60 |
+
def chunk_data(data : list | dict, chunk_size: int):
|
| 61 |
+
"""
|
| 62 |
+
This function takes an array and a chunk size as input, and returns a new array
|
| 63 |
+
where the original array is divided into smaller chunks of the specified size.
|
| 64 |
+
|
| 65 |
+
Parameters:
|
| 66 |
+
data (list): The original data to be chunked.
|
| 67 |
+
chunk_size (int): The size of each chunk.
|
| 68 |
+
|
| 69 |
+
Returns:
|
| 70 |
+
list: A new array containing the chunks of the original array.
|
| 71 |
+
|
| 72 |
+
Example:
|
| 73 |
+
>>> chunk_array([1, 2, 3, 4, 5, 6], 2)
|
| 74 |
+
[[1, 2], [3, 4], [5, 6]]
|
| 75 |
+
"""
|
| 76 |
+
is_object = isinstance(data, dict)
|
| 77 |
+
is_array = isinstance(data, list)
|
| 78 |
+
|
| 79 |
+
if not is_object and not is_array:
|
| 80 |
+
raise TypeError("Data must be a list or a dictionary.")
|
| 81 |
+
elif is_array:
|
| 82 |
+
return [data[i:i+chunk_size] for i in range(0, len(data), chunk_size)]
|
| 83 |
+
elif is_object:
|
| 84 |
+
items = list(data.items())
|
| 85 |
+
for i in range(0, len(items), chunk_size):
|
| 86 |
+
yield dict(items[i:i + chunk_size])
|
| 87 |
+
|
| 88 |
+
|
| 89 |
+
def generate_ulid(seed: any = None) -> str:
    """
    Generate a Universally Unique Lexicographically Sortable Identifier.

    Parameters:
        seed (any, optional): Optional value passed to the ULID constructor
            as the basis for generation. Defaults to None.

    Returns:
        str: A string representing the generated ULID.
    """
    # NOTE(review): assumes the installed `ulid` package exposes ULID(seed)
    # and a .generate() method — confirm against the library's API.
    ulid = ULID() if seed is None else ULID(seed)
    return ulid.generate()
|
| 112 |
+
|
| 113 |
+
def create_uuid_from_string(val: str) -> str:
    """
    Derive a deterministic UUID from a string.

    The input is hashed with MD5 and the 128-bit digest is interpreted as
    the UUID's hex value, so equal inputs always yield equal UUIDs.

    Parameters:
        val (str): The input string from which the UUID will be generated.

    Returns:
        str: The string form of the derived UUID.
    """
    digest_hex = hashlib.md5(val.encode("UTF-8")).hexdigest()
    return str(uuid.UUID(hex=digest_hex))
|
| 130 |
+
|
| 131 |
+
|
| 132 |
+
def to_snake_case(s):
    """
    Convert a string to snake_case.

    All-caps strings (e.g. acronyms) are returned unchanged; empty input
    yields the empty string.

    Parameters:
        s (str): The input string to be converted.

    Returns:
        str: The snake_case form.

    Example:
        >>> to_snake_case('FirstName')
        'first_name'
    """
    # Guard clauses: empty input, then acronyms/all-caps pass-through.
    if not s:
        return ''
    if s.isupper():
        return s

    # The pattern splits camelCase / PascalCase words, trailing digits and
    # multi-letter acronyms into separate tokens (kept identical to the
    # original regex).
    words = re.findall(r'[A-Z]{2,}(?=[A-Z][a-z]+[0-9]*|\b)|[A-Z]?[a-z]+[0-9]*|[A-Z]|[0-9]+', s)
    return '_'.join(word.lower() for word in words)
|
| 156 |
+
|
| 157 |
+
def convert_to_snakecase(original_data):
    """
    Recursively convert the keys of a dict (or of every dict inside a list)
    to snake_case.

    Parameters:
        original_data (dict or list): The structure whose keys to convert.

    Returns:
        dict or list: A new structure with snake_case keys; values that are
        themselves dicts or lists are converted recursively, all other
        values are passed through untouched.

    Raises:
        TypeError: If the input is neither a dict nor a list.

    Example:
        >>> convert_to_snakecase({'FirstName': 'John', 'LastName': 'Doe'})
        {'first_name': 'John', 'last_name': 'Doe'}
        >>> convert_to_snakecase([{'FirstName': 'Jane'}, {'FirstName': 'Bob'}])
        [{'first_name': 'Jane'}, {'first_name': 'Bob'}]
    """
    if isinstance(original_data, list):
        return [convert_to_snakecase(element) for element in original_data]
    if isinstance(original_data, dict):
        return {
            to_snake_case(key): (
                convert_to_snakecase(value) if isinstance(value, (dict, list)) else value
            )
            for key, value in original_data.items()
        }
    raise TypeError("Input must be a dictionary or a list of dictionaries.")
|
| 187 |
+
|
| 188 |
+
import json

def store_json_data(data, output_file):
    """
    Serialize ``data`` as JSON and write it to ``output_file``, creating
    parent directories as needed.

    Parameters:
        data: Any JSON-serializable object.
        output_file (str): Destination file path (relative or absolute).
    """
    try:
        # BUGFIX: os.makedirs('') raises FileNotFoundError, so only create
        # the parent directory when the path actually has one (bare file
        # names previously failed).
        parent_dir = os.path.dirname(output_file)
        if parent_dir:
            os.makedirs(parent_dir, exist_ok=True)

        # Write the JSON data to the file.
        with open(output_file, 'w') as file:
            json.dump(data, file, indent=4)

        print(f"Data successfully stored in {output_file}")
    except Exception as e:
        # Best-effort: report and swallow, matching the original contract.
        print(f"An error occurred: {e}")
|
| 202 |
+
|
| 203 |
+
def get_headers() -> Dict[str, str]:
    """
    Build the request headers carrying the 0x API key.

    The key is read from the OX_API_KEY environment variable; when unset,
    the literal string 'None' is sent (unchanged original behaviour).
    """
    api_key = os.getenv('OX_API_KEY')
    return {'0x-api-key': f"{api_key}"}
|
| 210 |
+
|
| 211 |
+
def get_private_key() -> str:
    """
    Return the wallet private key from the WALLET_PRIVATE_KEY environment
    variable (None when the variable is unset).
    """
    return os.environ.get('WALLET_PRIVATE_KEY')
|
src/libs/logger.py
ADDED
|
@@ -0,0 +1,17 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
|
| 2 |
+
from dotenv import load_dotenv
|
| 3 |
+
|
| 4 |
+
import logfire as lf
|
| 5 |
+
|
| 6 |
+
load_dotenv()
|
| 7 |
+
|
| 8 |
+
# Configure Logfire as the application-wide logger/tracer.
lf.configure(
    token=os.getenv('LOGFIRE_TOKEN'),
    # pydantic_plugin=lf.PydanticPlugin(record='all'),
    # NOTE(review): if LOG_LEVEL is unset this passes None as the minimum
    # console log level — confirm logfire accepts that, or add a default.
    console=lf.ConsoleOptions(min_log_level= os.getenv('LOG_LEVEL'))
)
# Auto-instrument the Redis and HTTP clients used elsewhere in the project.
lf.instrument_redis()
lf.instrument_httpx()
lf.instrument_requests()

# Re-export the configured logfire module under the name `logger`.
logger = lf
|
src/libs/rpc_client.py
ADDED
|
@@ -0,0 +1,52 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import httpx, uuid, os
from typing import Optional, Union
from dotenv import load_dotenv

load_dotenv()

# Default JSON-RPC endpoint, read once at import time.
rpc_server_url = os.getenv('GIINO_RPC_SERVER_URL')

async def rpc_call(
    method_name: str,  # The name of the RPC method to be called
    params: Optional[Union[dict, list]] = None,  # Optional parameters for the RPC method
    url: str = rpc_server_url  # The URL of the RPC server
) -> Optional[dict]:
    """
    Make a JSON-RPC 2.0 call to the given URL with basic-auth credentials.

    Args:
        method_name (str): The name of the RPC method to be called.
        params (Optional[Union[dict, list]], optional): Parameters for the RPC method. Defaults to None.
        url (str, optional): The URL of the RPC server. Defaults to rpc_server_url.

    Returns:
        Optional[dict]: The JSON response from the RPC server, or None when
        the call fails (network error or non-2xx status).
    """
    headers = {
        'Content-Type': 'application/json',
    }

    auth = httpx.BasicAuth(
        username=os.getenv('GIINO_RPC_SERVER_BASIC_AUTH_USERNAME'),
        password=os.getenv('GIINO_RPC_SERVER_BASIC_AUTH_PASSWORD')
    )

    payload = {
        'method': method_name,
        'params': params,
        'jsonrpc': '2.0',
        # Random request id, as required by JSON-RPC 2.0.
        'id': str(uuid.uuid4()),
    }

    try:
        async with httpx.AsyncClient() as client:
            response = await client.post(url, json=payload, headers=headers, auth=auth)
            response.raise_for_status()
            return response.json()
    # Catch httpx.HTTPError (the parent of both RequestError and
    # HTTPStatusError): previously only RequestError was caught, so a
    # 4xx/5xx raised by raise_for_status() escaped the intended
    # best-effort return-None path.
    except httpx.HTTPError as e:
        print(f"Error making RPC call: {e}")
        return None
|
src/libs/s3fs.py
ADDED
|
@@ -0,0 +1,26 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
import s3fs
from fs_s3fs import S3FS
from dotenv import load_dotenv

# Load DigitalOcean Spaces credentials from a local .env file.
load_dotenv()

# Filesystem handle for the 'exchanges-data' Spaces bucket
# (DigitalOcean Spaces exposes an S3-compatible API).
exchanges_data_s3fs = S3FS(
    bucket_name = 'exchanges-data',
    aws_access_key_id = os.getenv('DO_SPACES_ACCESS_KEY'),
    aws_secret_access_key = os.getenv('DO_SPACES_SECRET_KEY'),
    endpoint_url = os.getenv('DO_SPACES_ENDPOINT'),
    region = os.getenv('DO_SPACES_REGION'),
)

# AWS credentials
# NOTE(review): this dict reads DO_SPACES_ENDPOINT_URL / DO_SPACES_BUCKET_NAME /
# DO_SPACES_REGION_NAME, while the S3FS block above reads DO_SPACES_ENDPOINT /
# DO_SPACES_REGION — confirm which environment variable names are actually
# defined; one of the two spellings is likely wrong.
s3_credentials = {
    "s3_access_key_id": os.getenv("DO_SPACES_ACCESS_KEY"),
    "s3_secret_access_key": os.getenv("DO_SPACES_SECRET_KEY"),
    "s3_endpoint_url": os.getenv("DO_SPACES_ENDPOINT_URL"),
    "s3_bucket_name": os.getenv("DO_SPACES_BUCKET_NAME"),
    "s3_region": os.getenv("DO_SPACES_REGION_NAME"),
}

def get_s3_credentials():
    """Return the dict of DigitalOcean Spaces (S3-compatible) credentials."""
    return s3_credentials
|
src/libs/token_approval_helper.py
ADDED
|
@@ -0,0 +1,61 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import requests  # NOTE(review): appears unused in this module — confirm before removing
from web3 import Web3
from typing import Dict, Any

class TokenApprovalHelper:
    """Signs and submits ERC-20 `approve` transactions so a spender
    (e.g. the 0x Exchange Proxy) can move tokens on the owner's behalf."""
    def __init__(self, web3: Web3, private_key: str):
        self.web3 = web3  # connected Web3 instance used for all chain calls
        self.private_key = private_key  # key used to sign the approval tx

    def approve_token(self, erc20_contract_address: str, exchange_proxy_address: str, sell_amount: str, eth_address: str) -> Dict[str, Any]:
        """
        Approves the 0x Exchange Proxy contract to spend the specified amount of the ERC-20 token.

        Args:
            erc20_contract_address (str): The address of the ERC-20 token contract.
            exchange_proxy_address (str): The address of the 0x Exchange Proxy contract.
            sell_amount (str): The amount of the token to approve (base units; converted with int()).
            eth_address (str): Your Ethereum address (the token owner and tx sender).

        Returns:
            dict: The transaction receipt of the approval transaction.

        Example:
            >>> approve_token('0xTokenAddress', '0xProxyAddress', '1000000000000000000', '0xYourAddress')
        """
        # Minimal ABI containing only approve(address _spender, uint256 _value).
        erc20_abi = [
            {
                "constant": False,
                "inputs": [
                    {
                        "name": "_spender",
                        "type": "address"
                    },
                    {
                        "name": "_value",
                        "type": "uint256"
                    }
                ],
                "name": "approve",
                "outputs": [
                    {
                        "name": "",
                        "type": "bool"
                    }
                ],
                "type": "function"
            }
        ]
        erc20_contract = self.web3.eth.contract(address=erc20_contract_address, abi=erc20_abi)
        # NOTE(review): buildTransaction / getTransactionCount / toWei /
        # signTransaction / sendRawTransaction / waitForTransactionReceipt
        # are the web3.py v5 camelCase names; web3.py v6 renamed them to
        # snake_case — confirm the pinned web3 version supports these.
        approve_tx = erc20_contract.functions.approve(
            exchange_proxy_address,
            int(sell_amount)
        ).buildTransaction({
            'from': eth_address,
            'nonce': self.web3.eth.getTransactionCount(eth_address),
            'gas': 100000,  # fixed gas limit for a plain ERC-20 approve
            'gasPrice': self.web3.toWei('20', 'gwei')  # hard-coded gas price — NOTE(review): consider fetching the current price
        })
        signed_approve_tx = self.web3.eth.account.signTransaction(approve_tx, private_key=self.private_key)
        tx_hash = self.web3.eth.sendRawTransaction(signed_approve_tx.rawTransaction)
        # Blocks until the transaction is mined and returns its receipt.
        return self.web3.eth.waitForTransactionReceipt(tx_hash)
|
src/libs/web3.py
ADDED
|
@@ -0,0 +1,8 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
from web3 import Web3
from dotenv import load_dotenv

load_dotenv()

def get_web3_instance(rpc_ulr: str = None) -> Web3:
    """
    Create a Web3 instance backed by an HTTP provider.

    Args:
        rpc_ulr (str, optional): RPC endpoint URL. (Parameter name kept —
            typo and all — for backward compatibility with keyword
            callers.) Defaults to the INFURA_OR_ALCHEMY_URL environment
            variable, now resolved at call time: the previous default
            expression froze the value at import time, ignoring any later
            environment changes.

    Returns:
        Web3: A Web3 instance connected via HTTPProvider.
    """
    if rpc_ulr is None:
        rpc_ulr = os.getenv('INFURA_OR_ALCHEMY_URL')
    return Web3(Web3.HTTPProvider(rpc_ulr))
|
src/llms/sourcegraph.py
ADDED
|
@@ -0,0 +1,126 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import os
from dotenv import load_dotenv

from scrapegraphai.graphs import SearchGraph
from scrapegraphai.graphs import SmartScraperMultiGraph
from scrapegraphai.graphs import ScriptCreatorGraph

from src.databases.redis import REDIS_CACHED
from src.libs.constants import ONE_HOUR_IN_SECONDS
from src.libs.logger import logger


load_dotenv()

# Decorator that caches function results in Redis for a given TTL.
redis_cache = REDIS_CACHED

# Named LLM presets for the scrapegraphai graphs. The functions below
# accept either one of these keys ("openai" / "groq") or a full config dict.
default_graph_config: dict = {
    "openai": {
        "llm": {
            "api_key": os.getenv("OPENAI_API_KEY"),
            "model": "gpt-3.5-turbo",
            # "model": "gpt-4o",
            "temperature": 0,
        }
    },
    "groq": {
        "llm": {
            "model": "groq/llama3-70b-8192",
            "api_key": os.getenv("GROQ_API_KEY"),
            "temperature": 0
        },
        "embeddings": {
            "model": "ollama/nomic-embed-text",
            # Embeddings are served by a local Ollama instance.
            "base_url": "http://localhost:11434",
        }
    }
}
|
| 38 |
+
|
| 39 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
@logger.instrument()
def scrape_graph(prompt: str, sources: str | list[str], config: dict | str = None, cache_ttl: int = None) -> str:
    """
    Scrape the given sources with a SmartScraperMultiGraph.

    Args:
        prompt (str): The prompt or query to be used for scraping.
        sources (str | list[str]): The source(s) from which to scrape data.
        config (dict | str, optional): Either a preset name from
            ``default_graph_config`` ("openai" / "groq") or a full config
            dict. Defaults to the "openai" preset.
        cache_ttl (int, optional): TTL in seconds for the cached result
            (consumed by the ``redis_cache`` decorator).

    Returns:
        str: The scraped data.

    Raises:
        KeyError: If ``config`` names a preset that does not exist.
    """
    logger.info({'prompt': prompt, 'sources': sources, 'config': config, 'cache_ttl': cache_ttl})

    # Resolve config explicitly. The previous `A and B or C` chain would
    # silently substitute the default whenever an explicit-but-falsy
    # config (e.g. an empty dict) was passed.
    if isinstance(config, str):
        config = default_graph_config[config]
    elif config is None:
        config = default_graph_config["openai"]
    logger.debug(f"Config: {config}")

    smart_scraper_graph = SmartScraperMultiGraph(
        prompt=prompt,
        source=sources,
        config=config
    )

    result = smart_scraper_graph.run()
    logger.debug(f"Result: {result}")

    return result
|
| 69 |
+
|
| 70 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
@logger.instrument()
def search_graph(prompt: str, config: dict | str = None, cache_ttl: int = None) -> str:
    """
    Search the web for information relevant to the prompt using a SearchGraph.

    Args:
        prompt (str): The prompt or query to be used for searching.
        config (dict | str, optional): Either a preset name from
            ``default_graph_config`` ("openai" / "groq") or a full config
            dict. Defaults to the "openai" preset.
        cache_ttl (int, optional): TTL in seconds for the cached result
            (consumed by the ``redis_cache`` decorator).

    Returns:
        str: The search results.

    Raises:
        KeyError: If ``config`` names a preset that does not exist.
    """
    logger.info({'prompt': prompt, 'config': config, 'cache_ttl': cache_ttl})

    # Resolve config explicitly instead of the previous `A and B or C`
    # chain, which replaced any falsy explicit config with the default.
    if isinstance(config, str):
        config = default_graph_config[config]
    elif config is None:
        config = default_graph_config["openai"]
    logger.debug(f"Config: {config}")

    search_graph = SearchGraph(
        prompt=prompt,
        config=config,
    )

    result = search_graph.run()
    logger.debug(f"Result: {result}")

    return result
|
| 98 |
+
|
| 99 |
+
@redis_cache(ttl=ONE_HOUR_IN_SECONDS)
@logger.instrument()
def _create_script_graph(prompt: str, source: str, library: str, config: dict | str = None, cache_ttl: int = None) -> str:
    """
    Generate a scraping script for a source with a ScriptCreatorGraph.

    Args:
        prompt (str): The prompt or query describing the script to generate.
        source (str): The source the generated script should target.
        library (str): The library the generated script should use.
        config (dict | str, optional): Either a preset name from
            ``default_graph_config`` ("openai" / "groq") or a full config
            dict. Defaults to the "openai" preset.
        cache_ttl (int, optional): TTL in seconds for the cached result
            (consumed by the ``redis_cache`` decorator).

    Returns:
        str: The generated script.

    Raises:
        KeyError: If ``config`` names a preset that does not exist.
    """
    # Explicit resolution replaces the `A and B or C` chain, which would
    # wrongly fall back to the default for a falsy explicit config.
    if isinstance(config, str):
        config = default_graph_config[config]
    elif config is None:
        config = default_graph_config["openai"]

    script_creator_graph = ScriptCreatorGraph(
        prompt=prompt,
        source=source,
        config=config,
        library=library
    )

    result = script_creator_graph.run()

    return result
|
src/search_services/exa.py
ADDED
|
File without changes
|
src/search_services/jina_ai.py
ADDED
|
@@ -0,0 +1,100 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from dotenv import load_dotenv
|
| 2 |
+
|
| 3 |
+
import httpx
|
| 4 |
+
import urllib.parse
|
| 5 |
+
from typing import Self
|
| 6 |
+
from src.libs.helper_functions import convert_to_snakecase
|
| 7 |
+
from src.databases.redis import REDIS_CACHED
|
| 8 |
+
from src.libs.constants import ONE_MINUTE_IN_SECONDS
|
| 9 |
+
from src.libs.constants import JINA_READER_BASE_ENDPOINT, JINA_SEARCH_BASE_ENDPOINT, ONE_MINUTE_IN_SECONDS
|
| 10 |
+
|
| 11 |
+
load_dotenv()
|
| 12 |
+
|
| 13 |
+
redis_cache = REDIS_CACHED
|
| 14 |
+
|
| 15 |
+
class JinaAI:
    """
    Client for Jina AI's web search and web reader endpoints.

    Attributes:
        JINA_SEARCH_BASE_ENDPOINT (str): Base URL of the Jina search service.
        JINA_READER_BASE_ENDPOINT (str): Base URL of the Jina reader service.
    """

    def __init__(self, search_base_url: str = None, reader_base_url: str = None) -> None:
        # Fall back to the module-level constants when no override is given.
        self.JINA_SEARCH_BASE_ENDPOINT = search_base_url if search_base_url else JINA_SEARCH_BASE_ENDPOINT
        self.JINA_READER_BASE_ENDPOINT = reader_base_url if reader_base_url else JINA_READER_BASE_ENDPOINT

    @redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
    def search_web_with_jina(self, search_query: str) -> dict | None:
        """
        Run a web search through Jina AI.

        Args:
            search_query (str): The query string to search for.

        Returns:
            dict | None: Snake-cased JSON search results on success,
            otherwise None (the HTTP error is printed).
        """
        base = self.JINA_SEARCH_BASE_ENDPOINT
        quoted_query = urllib.parse.quote(search_query)
        request_headers = {"Accept": "application/json"}

        try:
            with httpx.Client(timeout=30.0) as http_client:
                result = http_client.get(f"{base}{quoted_query}", headers=request_headers)
                result.raise_for_status()
                return convert_to_snakecase(result.json())
        except httpx.HTTPError as exc:
            print(f"An error occurred: {exc}")
            return None

    @redis_cache(ttl=ONE_MINUTE_IN_SECONDS)
    def read_website_with_jina(self, website_url: str) -> dict | None:
        """
        Fetch a page's content through the Jina AI reader.

        Args:
            website_url (str): URL of the page to read.

        Returns:
            dict | None: Snake-cased JSON page content on success,
            otherwise None (the HTTP error is printed).
        """
        base = self.JINA_READER_BASE_ENDPOINT
        request_headers = {"Accept": "application/json"}

        try:
            with httpx.Client(timeout=30.0) as http_client:
                result = http_client.get(f"{base}{website_url}", headers=request_headers)
                result.raise_for_status()
                return convert_to_snakecase(result.json())
        except httpx.HTTPError as exc:
            print(f"An error occurred: {exc}")
            return None
|
src/tools/cloudinary_toolkit.py
ADDED
|
@@ -0,0 +1,57 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import cloudinary
|
| 3 |
+
import cloudinary.uploader
|
| 4 |
+
import cloudinary.api
|
| 5 |
+
from phi.tools import Toolkit
|
| 6 |
+
from phi.utils.log import logger
|
| 7 |
+
|
| 8 |
+
class CloudinaryTool(Toolkit):
    """Toolkit wrapper around the Cloudinary upload API."""

    def __init__(self, cloud_name: str, api_key: str, api_secret: str):
        super().__init__(name="cloudinary_tool")

        # Configure the process-global cloudinary client with the
        # provided credentials.
        cloudinary.config(
            cloud_name=cloud_name,
            api_key=api_key,
            api_secret=api_secret
        )

        # Expose upload_file through the toolkit interface.
        self.register(self.upload_file)

    def upload_file(self, file_path: str) -> str:
        """
        Upload a file to Cloudinary as a raw resource.

        Args:
            file_path (str): Path of the file to upload.

        Returns:
            str: The stringified upload response on success, or an
            "Error: ..." message on failure.

        Example:
            >>> upload_file('/path/to/file.pdf')
        """
        logger.info(f"Uploading file to Cloudinary: {file_path}")
        try:
            upload_result = cloudinary.uploader.upload(file_path, resource_type="raw")
            logger.info(f"Upload response: {upload_result}")
            return f"{upload_result}"
        except Exception as exc:
            logger.warning(f"Failed to upload file: {exc}")
            return f"Error: {exc}"
|
| 48 |
+
|
| 49 |
+
# # Example usage
|
| 50 |
+
# cloudinary_tool = CloudinaryTool(
|
| 51 |
+
# cloud_name="your_cloud_name",
|
| 52 |
+
# api_key="your_api_key",
|
| 53 |
+
# api_secret="your_api_secret"
|
| 54 |
+
# )
|
| 55 |
+
#
|
| 56 |
+
# upload_response = cloudinary_tool.upload_file('/path/to/your/file.txt')
|
| 57 |
+
# print(upload_response)
|
src/tools/crypto_data_toolkit.py
ADDED
|
@@ -0,0 +1,151 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
from phi.tools import Toolkit
|
| 2 |
+
from phi.utils.log import logger
|
| 3 |
+
|
| 4 |
+
from src.data_sources.coin_gecko import CoinGecko
|
| 5 |
+
from src.data_sources.cryptocompare import CryptoCompare
|
| 6 |
+
from src.data_sources.dexscreener import DexScreener
|
| 7 |
+
|
| 8 |
+
class CryptoDataTools(Toolkit):
    """Toolkit aggregating crypto market data from CoinGecko, CryptoCompare and DexScreener."""

    def __init__(self):
        super().__init__(name="crypto_data_tools")

        # Expose the data methods through the toolkit interface.
        self.register(self.get_coin_price)
        self.register(self.get_coin_data)
        self.register(self.get_exchange_data)
        self.register(self.get_latest_crypto_news_data)

        # Underlying data-source clients.
        self.coingecko = CoinGecko()
        self.crypto_compare = CryptoCompare()
        self.dex_screener = DexScreener()

    def get_coin_price(self, coin_id: str, vs_currency: str = "usd") -> str:
        """
        Fetch price data for a coin from CoinGecko and CryptoCompare.

        Args:
            coin_id (str): CoinGecko-style coin identifier (e.g. "bitcoin").
            vs_currency (str, optional): Quote currency. Defaults to "usd".

        Returns:
            str: String representation of a (coingecko, cryptocompare)
            tuple of per-coin price dicts, or an "Error: ..." message.
        """
        logger.info(f"Fetching price data for {coin_id} cryptocurrency coin from CoinGecko and CryptoCompare")
        try:
            gecko_prices = {}
            compare_prices = {}

            gecko_raw = self.coingecko.get_coin_price(ids=[coin_id], vs_currencies=[vs_currency])
            compare_raw = self.crypto_compare.get_coin_price(ids=[coin_id], vs_currencies=[vs_currency])

            logger.debug(f"coingecko_price_data: {gecko_raw}")
            logger.debug(f"crypto_compare_price_data: {compare_raw}")

            if coin_id in gecko_raw:
                gecko_prices[coin_id] = gecko_raw[coin_id]
            else:
                logger.warning(f"Warning: CoinGecko data for {coin_id} not found.")

            # CryptoCompare keys its results by upper-cased symbol.
            if coin_id.upper() in compare_raw:
                compare_prices[coin_id] = compare_raw[coin_id.upper()]
            else:
                logger.warning(f"Warning: CryptoCompare data for {coin_id} not found.")

            logger.debug(f"response {gecko_prices}")
            logger.debug(f"response {compare_prices}")

            return f"{(gecko_prices, compare_prices)}"
        except Exception as e:
            logger.warning(f"Failed to fetch price data for {coin_id}: {e}")
            return f"Error: {e}"

    def get_coin_data(self, coin_symbol: str) -> str:
        """
        Fetch coin data from DexScreener and CryptoCompare.

        Args:
            coin_symbol (str): Ticker symbol of the coin (e.g. "BTC").

        Returns:
            str: String representation of a (dexscreener, cryptocompare)
            data tuple, or an "Error: ..." message.

        Example:
            >>> get_coin_data("BTC")
        """
        logger.info(f"Fetching coin data for {coin_symbol} cryptocurrency coin")
        try:
            dex_result = self.dex_screener.search(query=coin_symbol)
            compare_result = self.crypto_compare.get_overall_coin_data(symbol=coin_symbol)

            logger.debug(f"dexscreener_coin_data: {dex_result}")
            logger.debug(f"cryptocompare_coin_data: {compare_result}")

            return f"{(dex_result, compare_result)}"
        except Exception as e:
            logger.warning(f"Failed to fetch coin data for {coin_symbol}: {e}")
            return f"Error: {e}"

    def get_exchange_data(self, exchange: str) -> str:
        """
        Fetch exchange data from CoinGecko and DexScreener.

        Args:
            exchange (str): Exchange identifier (e.g. "binance").

        Returns:
            str: String representation of a (coingecko, dexscreener)
            data tuple, or an "Error: ..." message.

        Example:
            >>> get_exchange_data("binance")
        """
        logger.info(f"Fetching exchange data for {exchange} cryptocurrency exchange")
        try:
            gecko_result = self.coingecko.get_exchange_data(id=exchange)
            dex_result = self.dex_screener.search(query=exchange)

            logger.debug(f"coingecko exchange data: {gecko_result}")
            logger.debug(f"dexscreener exchange data: {dex_result}")

            return f"{(gecko_result, dex_result)}"
        except Exception as e:
            logger.warning(f"Failed to fetch exchange data for {exchange}: {e}")
            return f"Error: {e}"

    def get_latest_crypto_news_data(self) -> str:
        """
        Fetch trending coins and the latest crypto news from CoinGecko
        and CryptoCompare.

        Returns:
            str: String representation of a (trending coins, news
            categories, latest articles) tuple, or an "Error: ..." message.
        """
        logger.info("Fetching latest cryptocurrency news data")
        try:
            trending_coins = self.coingecko.get_trending_coin_list()
            news_categories = self.crypto_compare.get_news_categories()
            latest_articles = self.crypto_compare.get_latest_news_articles()

            logger.debug(f"coingecko trending coins: {trending_coins}")
            logger.debug(f"cryptocompare news categories: {news_categories}")
            logger.debug(f"cryptocompare latest news articles: {latest_articles}")

            return f"{(trending_coins, news_categories, latest_articles)}"
        except Exception as e:
            logger.warning(f"Failed to fetch latest cryptocurrency news data: {e}")
            return f"Error: {e}"
|
src/tools/crypto_evm_wallet_toolkit.py
ADDED
|
@@ -0,0 +1,97 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
from enum import Enum
|
| 3 |
+
from phi.tools import Toolkit
|
| 4 |
+
from phi.utils.log import logger
|
| 5 |
+
from src.libs.rpc_client import rpc_call
|
| 6 |
+
|
| 7 |
+
class CryptoEVMWalletTools(Toolkit):
    """Toolkit exposing EVM wallet operations backed by the Giino JSON-RPC server."""

    def __init__(self):
        super().__init__(name="crypto_evm_wallet_tools")

        # Registering methods to make them accessible via the toolkit
        self.register(self.get_evm_wallet_address)
        self.register(self.get_supported_evm_chains)
        self.register(self.get_evm_smart_wallet_address)
        # NOTE: performs a blocking RPC round-trip at construction time;
        # instantiating this toolkit requires the RPC server to be reachable.
        self.chains = self.get_supported_evm_chains()

    def get_supported_evm_chains(self) -> str:
        """
        Fetches the list of supported EVM chains.

        Returns:
            str: A string representation of the RPC response containing the
            list of supported EVM chains. (The previous ``list[str]``
            annotation was wrong — the method has always returned a
            formatted string.)
        """
        logger.info("Fetching supported EVM chains")

        params = {}
        response = asyncio.run(rpc_call(method_name="getEVMSupportedChains", params=params))
        return f"{response}"

    def get_evm_wallet_address(self, user_email: str, chain: str, testnet: bool = True) -> str:
        """
        Fetches (creating if needed) an EVM wallet address for the given
        user email on a supported chain.

        Parameters:
        - user_email (str): The email of the user for whom the wallet is being created.
        - chain (ethereum | binance | base | polygon): The EVM chain for which the wallet is being fetched.
        - testnet (bool, optional): Whether the wallet should be on the testnet. Defaults to `True`.

        Returns:
        - str: A string representation of the RPC response.

        Note:
            Uses asyncio.run() to drive the asynchronous RPC call from
            this synchronous toolkit method.
        """
        logger.info(f"Creating crypto wallet account for {user_email}")

        params = {
            'chain': chain,
            'testnet': testnet,
            'userEmail': user_email,
        }
        response = asyncio.run(rpc_call(method_name="getEVMWallet", params=params))
        return f"{response}"

    def get_evm_smart_wallet_address(
        self,
        user_email: str,
        chain: str,
        gasless: bool = True,
        testnet: bool = True
    ) -> str:
        """
        Fetches (creating if needed) a smart EVM wallet address for the
        given user email on a supported chain.

        Parameters:
        - user_email (str): The email of the user for whom the wallet is being fetched.
        - chain (ethereum | binance | base | polygon): The EVM chain for which the wallet is being fetched.
        - gasless (bool, optional): Whether the wallet should be gasless. Defaults to `True`.
        - testnet (bool, optional): Whether the wallet should be on the testnet. Defaults to `True`.

        Returns:
        - str: A string representation of the RPC response.

        Note:
            Uses `asyncio.run()` to drive the asynchronous RPC call from
            this synchronous toolkit method.
        """
        logger.info(f"Fetching crypto wallet account for {user_email}")

        params = {
            'chain': chain,
            'gasless': gasless,
            'testnet': testnet,
            'userEmail': user_email,
        }
        response = asyncio.run(rpc_call(method_name="getEVMSmartWallet", params=params))
        return f"{response}"
|
src/tools/crypto_swap_toolkit.py
ADDED
|
@@ -0,0 +1,156 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import json
|
| 3 |
+
import pprint
|
| 4 |
+
import requests
|
| 5 |
+
from web3 import Web3
|
| 6 |
+
from phi.tools import Toolkit
|
| 7 |
+
from phi.utils.log import logger
|
| 8 |
+
from src.libs.web3 import get_web3_instance
|
| 9 |
+
from src.libs.helper_functions import get_headers, get_private_key
|
| 10 |
+
from src.libs.token_approval_helper import TokenApprovalHelper
|
| 11 |
+
from src.libs.rpc_client import rpc_call
|
| 12 |
+
|
| 13 |
+
class CryptoSwapTools(Toolkit):
    """Toolkit exposing 0x Swap API operations.

    Quote/price/source lookups are delegated to the project's RPC server via
    ``rpc_call``; ``execute_swap`` signs and broadcasts the swap transaction
    locally using the configured private key.
    """

    def __init__(self, web3: Web3 = get_web3_instance()):
        super().__init__(name="swap_tools")

        # Store Web3 instance.
        # NOTE(review): the default argument is evaluated once at class-definition
        # time, so every instance constructed without an explicit `web3` shares
        # the same connection — confirm this sharing is intended.
        self.web3 = web3

        # Helper that issues ERC-20 approvals with the configured private key.
        self.token_approval_helper = TokenApprovalHelper(web3, get_private_key())

        # Registering methods to make them accessible via the toolkit
        self.register(self.get_swap_quote)
        self.register(self.get_swap_price)
        self.register(self.get_swap_sources)
        self.register(self.execute_swap)

    def get_swap_quote(self, buy_token: str, sell_token: str, sell_amount: str) -> str:
        """
        Fetches a swap quote from the 0x Swap API.

        Args:
            buy_token (str): The token to buy (e.g., 'DAI').
            sell_token (str): The token to sell (e.g., 'ETH').
            sell_amount (str): The amount of the sell token to swap, in the smallest unit (e.g., wei for ETH).

        Returns:
            str: String representation of the RPC response (JSON-like quote details),
                 or an "Error: ..." string on failure.

        Example:
            >>> get_swap_quote('DAI', 'ETH', '1000000000000000000')
        """
        logger.info(f"Fetching swap quote: buying {buy_token} with {sell_token} amount {sell_amount}")
        try:
            params = {
                'buyToken': buy_token,
                'sellToken': sell_token,
                'sellAmount': sell_amount
            }
            response = asyncio.run(rpc_call(method_name="getSwapQuote", params=params))
            return f"{response}"
        except requests.exceptions.RequestException as e:
            # NOTE(review): rpc_call goes through asyncio, so it is unclear whether
            # it ever raises requests' RequestException — confirm against rpc_client.
            logger.warning(f"Failed to get swap quote: {e}")
            return f"Error: {e}"

    def get_swap_price(self, buy_token: str, sell_token: str, buy_amount: str) -> str:
        """
        Fetches the price for a swap from the 0x Swap API.

        Args:
            buy_token (str): The token to buy.
            sell_token (str): The token to sell.
            buy_amount (str): The amount of the buy token, in the smallest unit.

        Returns:
            str: String representation of the RPC response (price details),
                 or an "Error: ..." string on failure.

        Example:
            >>> get_swap_price('DAI', 'ETH', '1000000000000000000')
        """
        logger.info(f"Fetching swap price: buying {buy_token} with {sell_token} amount {buy_amount}")
        try:
            params = {
                'buyToken': buy_token,
                'sellToken': sell_token,
                'buyAmount': buy_amount
            }
            response = asyncio.run(rpc_call(method_name="getSwapPrice", params=params))
            return f"{response}"
        except requests.exceptions.RequestException as e:
            logger.warning(f"Failed to get swap price: {e}")
            return f"Error: {e}"

    def get_swap_sources(self) -> str:
        """
        Fetches the list of liquidity sources from the 0x Swap API.

        Returns:
            str: String representation of the RPC response (liquidity sources).

        Example:
            >>> get_swap_sources()
        """
        logger.info("Fetching swap sources")

        response = asyncio.run(rpc_call(method_name="getSwapSources"))
        return f"{response}"

    def execute_swap(self, buy_token: str, sell_token: str, sell_amount: str, eth_address: str) -> str:
        """
        Executes a swap using the 0x Swap API.

        Fetches a quote, approves the sell token if needed (skipped for ETH),
        then signs and broadcasts the swap transaction.

        Args:
            buy_token (str): The token to buy (e.g., 'DAI').
            sell_token (str): The token to sell (e.g., 'ETH').
            sell_amount (str): The amount of the sell token to swap, in the smallest unit (e.g., wei for ETH).
            eth_address (str): The Ethereum address of the user executing the swap.

        Returns:
            str: String representation of the transaction receipt,
                 or an "Error: ..." string on failure.

        Example:
            >>> execute_swap('DAI', 'ETH', '1000000000000000000', '0xYourEthereumAddress')
        """
        # Get the swap quote (returned as a stringified JSON payload).
        quote = json.loads(self.get_swap_quote(buy_token, sell_token, sell_amount))
        logger.info(f"Swap quote: {quote}")

        if 'error' in quote:
            return "Error: Failed to get swap quote"

        # Approve the token if needed (skip if selling native ETH).
        if sell_token != 'ETH':
            approval_receipt = self.token_approval_helper.approve_token(sell_token, quote['allowanceTarget'],
                                                                        sell_amount, eth_address)
            logger.info(f"Approval receipt: {approval_receipt}")

            if 'status' not in approval_receipt or approval_receipt['status'] != 1:
                return "Error: Token approval failed"

        # Execute the swap.
        try:
            swap_tx = {
                'from': eth_address,
                'to': quote['to'],
                'data': quote['data'],
                'value': int(quote['value']),
                'gas': 200000,  # fixed gas limit; NOTE(review): consider using the quote's gas estimate
                'gasPrice': self.web3.to_wei('20', 'gwei'),
                'nonce': self.web3.eth.get_transaction_count(eth_address)
            }
            # BUGFIX: `signTransaction` is the web3.py v5 camelCase API, removed in
            # v6 — the rest of this file already uses v6 snake_case methods
            # (to_wei, get_transaction_count, send_raw_transaction), so the old
            # call raised AttributeError at runtime.
            signed_swap_tx = self.web3.eth.account.sign_transaction(swap_tx, private_key=get_private_key())
            # web3.py v6 exposes the signed payload as `rawTransaction`; v7 renamed
            # it to `raw_transaction` — support both.
            raw_tx = getattr(signed_swap_tx, "rawTransaction", None)
            if raw_tx is None:
                raw_tx = signed_swap_tx.raw_transaction
            tx_hash = self.web3.eth.send_raw_transaction(raw_tx)
            receipt = self.web3.eth.wait_for_transaction_receipt(tx_hash)
            logger.info(f"Swap transaction receipt: {receipt}")
            return f"{receipt}"
        except Exception as e:
            logger.warning(f"Failed to execute swap: {e}")
            return f"Error: {e}"
|
src/tools/notepad_toolkit.py
ADDED
|
@@ -0,0 +1,214 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import asyncio
|
| 2 |
+
import chainlit as cl
|
| 3 |
+
from phi.tools import Toolkit
|
| 4 |
+
from phi.utils.log import logger
|
| 5 |
+
from src.libs.rpc_client import rpc_call # Replace with the actual path to your RPC client library
|
| 6 |
+
|
| 7 |
+
class NotedPadToolkit(Toolkit):
    """Toolkit for managing per-user notes stored behind the RPC server.

    Every operation resolves the current user's email from the Chainlit
    session and then delegates to the RPC server via ``rpc_call`` (driven
    with ``asyncio.run()``, so these methods must not be called from inside
    a running event loop).
    """

    def __init__(self):
        super().__init__(name="noted_pad_toolkit")

        # Registering methods to make them accessible via the toolkit
        self.register(self.get_note)
        self.register(self.get_all_notes)
        self.register(self.add_note)
        self.register(self.update_note)
        self.register(self.delete_note)
        self.register(self.query_notes)

    def get_user_email(self) -> str:
        """
        Fetches the user's email from the Chainlit user session.

        Returns:
            str: The user's email, or an "Error: ..." string on failure
                 (callers check for the "Error" prefix).

        Example:
            >>> get_user_email()
        """
        logger.info("Fetching user email")
        try:
            user_session = cl.user_session.get("user").metadata
            if not user_session:
                raise ValueError("User session not found")
            email = user_session['email']
            return email
        except Exception as e:
            logger.warning(f"Failed to get user email: {e}")
            return f"Error: {e}"

    # BUGFIX: a stray `@cl.on_chat_start` decorator was removed here — it
    # registered this argument-taking method as a Chainlit chat-start hook,
    # which Chainlit would invoke without `self`/`note_id` and crash.
    def get_note(self, note_id: str) -> str:
        """
        Fetches a specific note from the RPC server.

        Args:
            note_id (str): The ID of the note to fetch.

        Returns:
            str: The requested note, or an "Error: ..." string on failure.

        Example:
            >>> get_note('note_1')
        """
        logger.info(f"Fetching note: {note_id}")
        try:
            email = self.get_user_email()
            if "Error" in email:
                raise ValueError(email)

            params = {
                'email': email,
                'note_id': note_id
            }
            response = asyncio.run(rpc_call(method_name="getNote", params=params))
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to get note: {e}")
            return f"Error: {e}"

    def get_all_notes(self) -> str:
        """
        Fetches all notes from the RPC server.

        Returns:
            str: String representation of all notes, or an "Error: ..." string.

        Example:
            >>> get_all_notes()
        """
        logger.info("Fetching all notes")
        try:
            email = self.get_user_email()
            if "Error" in email:
                raise ValueError(email)

            params = {
                'email': email
            }
            # BUGFIX: removed a leftover `print(params)` debug statement that
            # leaked the user's email to stdout.
            response = asyncio.run(rpc_call(method_name="getAllNotes", params=params))
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to get all notes: {e}")
            return f"Error: {e}"

    def add_note(self, note_id: str, note_content: str) -> str:
        """
        Adds a new note to the RPC server. Use to persist important notes
        captured during a conversation.

        Args:
            note_id (str): The ID of the note to add.
            note_content (str): The content of the note.

        Returns:
            str: Confirmation message, or an "Error: ..." string on failure.

        Example:
            >>> add_note('note_1', 'This is a new note')
        """
        logger.info(f"Adding note: {note_id}")
        try:
            email = self.get_user_email()
            if "Error" in email:
                raise ValueError(email)

            params = {
                'email': email,
                'note_id': note_id,
                'note_content': note_content
            }
            response = asyncio.run(rpc_call(method_name="addNote", params=params))
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to add note: {e}")
            return f"Error: {e}"

    def update_note(self, note_id: str, note_content: str) -> str:
        """
        Updates an existing note on the RPC server.

        Args:
            note_id (str): The ID of the note to update.
            note_content (str): The new content of the note.

        Returns:
            str: Confirmation message, or an "Error: ..." string on failure.

        Example:
            >>> update_note('note_1', 'This is an updated note')
        """
        logger.info(f"Updating note: {note_id}")
        try:
            email = self.get_user_email()
            if "Error" in email:
                raise ValueError(email)

            params = {
                'email': email,
                'note_id': note_id,
                'note_content': note_content
            }
            response = asyncio.run(rpc_call(method_name="updateNote", params=params))
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to update note: {e}")
            return f"Error: {e}"

    def delete_note(self, note_id: str) -> str:
        """
        Deletes a note from the RPC server.

        Args:
            note_id (str): The ID of the note to delete.

        Returns:
            str: Confirmation message, or an "Error: ..." string on failure.

        Example:
            >>> delete_note('note_1')
        """
        logger.info(f"Deleting note: {note_id}")
        try:
            email = self.get_user_email()
            if "Error" in email:
                raise ValueError(email)

            params = {
                'email': email,
                'note_id': note_id
            }
            response = asyncio.run(rpc_call(method_name="deleteNote", params=params))
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to delete note: {e}")
            return f"Error: {e}"

    def query_notes(self, query: str) -> str:
        """
        Queries notes from the RPC server based on the provided context.

        Args:
            query (str): The context or keywords to search for in notes.

        Returns:
            str: The notes that match the query, or an "Error: ..." string.

        Example:
            >>> query_notes('spiritual growth')
        """
        logger.info(f"Querying notes with context: {query}")
        try:
            email = self.get_user_email()
            if "Error" in email:
                raise ValueError(email)

            params = {
                'email': email,
                'query': query
            }
            response = asyncio.run(rpc_call(method_name="queryNotes", params=params))
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to query notes: {e}")
            return f"Error: {e}"
|
src/tools/user_profile_toolkit.py
ADDED
|
@@ -0,0 +1,185 @@
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
|
| 1 |
+
import chainlit as cl
|
| 2 |
+
from phi.tools import Toolkit
|
| 3 |
+
from phi.utils.log import logger
|
| 4 |
+
|
| 5 |
+
|
| 6 |
+
class UserProfileToolkit(Toolkit):
    """Toolkit for reading and updating user profile data held in the
    Chainlit user session (``user.metadata``).

    The getters/updaters are thin wrappers around ``get_user_info`` /
    ``update_user_info``; all failures are reported as "Error: ..." strings
    rather than raised exceptions.
    """

    def __init__(self):
        super().__init__(name="user_profile_toolkit")

        # Registering methods to make them accessible via the toolkit
        self.register(self.get_user_name)
        self.register(self.get_user_email)
        self.register(self.get_user_picture)
        self.register(self.get_all_user_info)
        self.register(self.update_user_info)
        self.register(self.update_user_name)
        self.register(self.update_user_email)
        self.register(self.update_user_picture)

    # BUGFIX: a stray `@cl.on_chat_start` decorator was removed here — it
    # registered this argument-taking method as a Chainlit chat-start hook,
    # which Chainlit would invoke without `self`/`info_type` and crash.
    def get_user_info(self, info_type: str) -> str:
        """
        Fetches user information from the Chainlit user session.

        Args:
            info_type (str): The type of information to fetch ('name', 'email', 'picture').

        Returns:
            str: The requested user information, or an "Error: ..." string.

        Example:
            >>> get_user_info('name')
        """
        logger.info(f"Fetching user info: {info_type}")
        try:
            # Fetch user session data
            user_session = cl.user_session.get("user").metadata
            if not user_session:
                raise ValueError("User session not found")

            response = user_session[info_type]
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to get user info: {e}")
            return f"Error: {e}"

    def get_all_user_info(self) -> str:
        """
        Fetches all user information from the Chainlit user session.

        Returns:
            str: String representation of the user's metadata dict,
                 or an "Error: ..." string.

        Example:
            >>> get_all_user_info()
        """
        logger.info("Fetching all user info")
        try:
            # Fetch user session data
            user_session = cl.user_session.get("user").metadata
            if not user_session:
                raise ValueError("User session not found")

            response = user_session
            return f"{response}"
        except Exception as e:
            logger.warning(f"Failed to get all user info: {e}")
            return f"Error: {e}"

    def update_user_info(self, info_type: str, value: str) -> str:
        """
        Updates user information in the Chainlit user session.

        Args:
            info_type (str): The type of information to update ('name', 'email', 'picture').
            value (str): The new value to set.

        Returns:
            str: Confirmation message, or an "Error: ..." string.

        Example:
            >>> update_user_info('name', 'Jane Doe')
        """
        logger.info(f"Updating user info: {info_type} to {value}")
        try:
            # Fetch user session data
            user_session = cl.user_session.get("user").metadata
            if not user_session:
                raise ValueError("User session not found")

            # Update the user session data.
            # NOTE(review): this writes the metadata dict back under the "user"
            # key, replacing the original user object — confirm readers expect
            # that shape.
            user_session[info_type] = value
            cl.user_session.set("user", user_session)
            return f"{info_type} updated to {value}"
        except Exception as e:
            logger.warning(f"Failed to update user info: {e}")
            return f"Error: {e}"

    def get_user_name(self) -> str:
        """
        Fetches the user's name from the Chainlit user session.

        Returns:
            str: The user's name, or an "Error: ..." string.

        Example:
            >>> get_user_name()
        """
        response = self.get_user_info('name')
        return f"{response}"

    def get_user_email(self) -> str:
        """
        Fetches the user's email from the Chainlit user session.

        Returns:
            str: The user's email, or an "Error: ..." string.

        Example:
            >>> get_user_email()
        """
        response = self.get_user_info('email')
        return f"{response}"

    def get_user_picture(self) -> str:
        """
        Fetches the user's picture URL from the Chainlit user session.

        Returns:
            str: The user's picture URL, or an "Error: ..." string.

        Example:
            >>> get_user_picture()
        """
        response = self.get_user_info('picture')
        return f"{response}"

    def update_user_name(self, name: str) -> str:
        """
        Updates the user's name in the Chainlit user session.

        Args:
            name (str): The new name to set.

        Returns:
            str: Confirmation message, or an "Error: ..." string.

        Example:
            >>> update_user_name('Jane Doe')
        """
        response = self.update_user_info('name', name)
        return f"{response}"

    def update_user_email(self, email: str) -> str:
        """
        Updates the user's email in the Chainlit user session.

        Args:
            email (str): The new email to set.

        Returns:
            str: Confirmation message, or an "Error: ..." string.

        Example:
            >>> update_user_email('jane.doe@example.com')
        """
        response = self.update_user_info('email', email)
        return f"{response}"

    def update_user_picture(self, picture: str) -> str:
        """
        Updates the user's picture URL in the Chainlit user session.

        Args:
            picture (str): The new picture URL to set.

        Returns:
            str: Confirmation message, or an "Error: ..." string.

        Example:
            >>> update_user_picture('https://example.com/new_picture.jpg')
        """
        response = self.update_user_info('picture', picture)
        return f"{response}"
|