row_id
int64 0
48.4k
| init_message
stringlengths 1
342k
| conversation_hash
stringlengths 32
32
| scores
dict |
|---|---|---|---|
40,959
|
What is example of yaml?
|
50cc89e5329c145b267ae84105a5752d
|
{
"intermediate": 0.2972811460494995,
"beginner": 0.4592927396297455,
"expert": 0.24342605471611023
}
|
40,960
|
В Visual Prolog 7.5 заданы факты вида parent (a,b): a - родитель для b, и факты вида man(c): с - мужчина.
Используя эти предикаты, определить:
a) отношение grand_nephew(X,Y): X является внучатым племянником для Y (т.е. X - внук сестры или брата Y).
б) отношение has cousin Kim(X): у человека X есть двоюродный брат или сестра по имени Kim.
Написать полный код
Вот пример кода в Visual Prolog 7.5:
implement main
open core, console, string
domains
class facts
parent:(string).
man:(string).
class predicates
solve: (string, string) nondeterm (i,o).
clauses
parent("Корнеев").
parent("Докшин").
parent("Матвеев").
parent("Скобелев").
man("Офицер").
man("Пекарь").
man("Врач").
man("Инженер").
%Корнеев не пекарь и не офицер, потому что пекарь ходит пешком, а офицер не живет с врачом.
solve(X, Y):- parent(X), man(Y),
X="Корнеев"
, Y<>"Пекарь", Y<>"Офицер".
%Докшин не офицер потому что младше его и не пекарь потому что старше его.
solve(X, Y):- parent(X), man(Y),
X="Докшин", Y<>"Офицер", Y<>"Пекарь".
%Скобелев не инженер, потому что инженер либо Корнеев, Матвеев или Докшин
solve(X, Y):- parent(X), man(Y),
X="Скобелев", Y<>"Инженер".
%%мареев не офицер потому что докшин старше мареева и Кареева
solve(X, Y):- parent(X), man(Y),X="Матвеев", Y<>"Офицер".
solve(X, Y):- parent(X), man(Y).
clauses
run() :-init(),
solve("Корнеев", Y1),
solve("Докшин", Y2),
solve("Матвеев", Y3),
solve("Скобелев", Y4),
Y1<>Y2,Y2<>Y3,Y1<>Y4,Y3<>Y4,Y2<>Y4,Y1<>Y3,
write("Корнеев ", Y1), nl,write("Докшин ", Y2), nl,write("Матвеев ", Y3),nl,write("Скобелев ", Y4), _=readLine(),!
;
_=readLine().
end implement main
goal
console::run(main::run).
|
8cdbc3c4dab2ccaedbc72528a9b2d7fe
|
{
"intermediate": 0.223744198679924,
"beginner": 0.6278908848762512,
"expert": 0.14836488664150238
}
|
40,961
|
how to get the application name from the text in python if the text is like ATS (Keystrokes) Features Tracker - Google Sheets - High memory usage - 989 MB - Brave - Profile 1
|
2de953ee50836d70b1f27ede9cef503f
|
{
"intermediate": 0.48068150877952576,
"beginner": 0.16891899704933167,
"expert": 0.35039952397346497
}
|
40,962
|
I have a macro enabled word document that contains data within a table.
The word document layout is set to minimal margin spaces, orientation portrait and size A4.
This layout is critical for the printing of the document.
The doucment also has a VBA code that pastes the table in an email client.
Unfortunately, when the document is pasted into the email client the document size (A4) is not retained,
making it impossible to print from the email.
Is there a way of controlling the paste to retain the exact document table size.
Here is the current VBA code:
Option Explicit
Private Sub CommandButton1_Click()
Dim OutApp As Object
Dim OutMail As Object
Dim tbl As Word.Table
Dim htmlBody As String
' Get the Word table
Set tbl = ActiveDocument.Tables(1)
tbl.Select
' Copy the table as HTML
tbl.Range.Copy
' Send to Outlook
Set OutApp = CreateObject("Outlook.Application")
Set OutMail = OutApp.CreateItem(0)
With OutMail
.To = "finance@magdalen.northants.sch.uk"
.Subject = "Facilities Internal Order Form"
.htmlBody = htmlBody ' Set the email body as HTML
.Display ' Or use .Send to send directly
End With
' Paste as HTML
OutMail.GetInspector.WordEditor.Range.PasteAndFormat wdFormatOriginalFormatting
Set OutMail = Nothing
Set OutApp = Nothing
End Sub
Private Sub Document_Open()
AppendCellValuesToTable
End Sub
Private Sub Document_Close()
Me.Saved = True
End Sub
|
1001f4874c9f91782c495887876b297d
|
{
"intermediate": 0.4775773882865906,
"beginner": 0.2949061989784241,
"expert": 0.22751636803150177
}
|
40,963
|
В Visual Prolog 7.5 заданы факты вида parent (a,b): a - родитель для b, и факты вида man©: с - мужчина.
Используя эти предикаты, определить:
a) отношение grand_nephew(X,Y): X является внучатым племянником для Y (т.е. X - внук сестры или брата Y).
б) отношение has cousin Kim(X): у человека X есть двоюродный брат или сестра по имени Kim.
Написать полный код
Вот пример кода в Visual Prolog 7.5:
implement main
open core
domains
gender = female(); male().
class facts - familyDB
person : (string Name, gender Gender).
parent : (string Person, string Parent).
class predicates
father : (string Person, string Father) nondeterm anyflow.
clauses
person("Judith",female()).
person("Bill",male()).
person("John",male()).
person("Pam",female()).
parent("John","Judith").
parent("Bill","John").
parent("Pam","Bill").
clauses
father(Person, Father) :-
parent(Person, Father),
person(Father, male()).
class predicates
grandFather : (string Person, string GrandFather) nondeterm (0,0).
clauses
grandFather(Person, GrandFather) :-
parent(Person, Parent),
father(Parent, GrandFather).
class predicates
ancestor : (string Person, string Ancestor) nondeterm (i,o).
clauses
ancestor(Person, Ancestor) :-
parent(Person, Ancestor).
ancestor(Person, Ancestor) :-
parent(Person, P1),
ancestor(P1, Ancestor).
class predicates
reconsult : (string FileName).
clauses
reconsult(FileName) :-
retractFactDB(familyDB),
file::consult(FileName, familyDB).
clauses
run():-
console::init(),
stdIO:: write("Load data\n"),
reconsult("…\\\fa.txt"),
stdIO:: write("\nfather test\n"),
father(X, Y),
stdIO::writef("% is the father of %\n",
Y, X),
fail.
run():-
stdIO:: write("\grandfather test\n"),
grandFather(X, Y),
stdIO:: writef("% is the grandfather of %\n", Y, X),
fail.
run():-
stdIO:: write("\n ancestor of Pam test\n"),
X = "Pam",
ancestor(X, Y),
stdIO:: writef("% is the ancestor of %\n", Y, X),
fail.
run():-
stdIO:: write("End of test\n"),
programControl:: sleep(9000),
succeed().
end implement main
goal
mainExe::run(main::run).
|
1013a27069157f50e9487cd8d683ed90
|
{
"intermediate": 0.3432740867137909,
"beginner": 0.4836038053035736,
"expert": 0.17312213778495789
}
|
40,964
|
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"source": [
"### Bienvenue dans mon projet pour conquerir le monde\n",
"\n",
"\n",
"---Nos objectif \\\n",
"Devenir riche \\\n",
"Conquerire le monde \\\n",
"Devenir immortel \\\n",
"\n",
"---Plan \\\n",
"créé un ia de trading pour avoir les première liquidité \\\n",
"la revendre\n",
"\n",
"\n",
"--- Ce qui nous empèche de conquèrir le monde : \\\n",
"les heures d'hiver et d'automne0"
],
"metadata": {
"id": "qnQm2vcsgDsv"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"import requests\n",
"import re\n",
"import subprocess\n",
"from datetime import datetime, timedelta\n",
"from pathlib import Path\n"
],
"metadata": {
"id": "zOseKr_vSvoa"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"| Option | Alias | Default | Description |\n",
"| ------ | ----- | ----- | ----- |\n",
"| --instrument | -i | | Trading instrument id. View list |\n",
"| --date-from | -from | | From date (yyyy-mm-dd) |\n",
"| --date-to | -to | now | To date (yyyy-mm-dd or 'now') |\n",
"| --timeframe | -t | d1 | Timeframe aggregation (tick, s1, m1, m5, m15, m30, h1, h4, d1, mn1) |\n",
"| --price-type | -p | bid | Price type: (bid, ask) |\n",
"| --utc-offset | -utc | 0 | UTC offset in minutes |\n",
"| --volumes | -v | false | Include volumes |\n",
"| --volume-units | -vu | millions | Volume units (millions, thousands, units) |\n",
"| --flats | -fl | false | Include flats (0 volumes) |\n",
"| --format | -f | json | Output format (csv, json, array). View output examples |\n",
"| --directory | -dir | ./download | Download directory |\n",
"| --batch-size | -bs | 10 | Batch size of downloaded artifacts |\n",
"| --batch-pause | -bp | 1000 | Pause between batches in ms |\n",
"| --cache | -ch | false | Use cache |\n",
"| --cache-path | -chpath | ./dukascopy-cache | Folder path for cache data |\n",
"| --retries | -r | 0 | Number of retries for a failed artifact download |\n",
"| --retry-on-empty | -re | false | A flag indicating whether requests with successful but empty (0 Bytes) responses should be retried. If retries is 0, this parameter will be ignored |\n",
"| --no-fail-after-retries | -fr | false | A flag indicating whether the process should fail after all retries have been exhausted. If retries is 0, this parameter will be ignored |\n",
"| --retry-pause | -rp | 500 | Pause between retries in milliseconds |\n",
"| --debug | -d | false | Output extra debugging |\n",
"| --silent | -s | false | Hides the search config in the CLI output |\n",
"| --inline | -in | false | Makes files smaller in size by removing new lines in the output (works only with json and array formats) |\n",
"| --file-name | -fn | | Custom file name for the generated file |\n",
"| --help | -h | | display help for command |"
],
"metadata": {
"id": "F7iDYaeVQYh1"
}
},
{
"cell_type": "code",
"source": [
"# Paramètres de la commande\n",
"instrument = \"eurusd\"\n",
"type_de_donnees = \"m1\"\n",
"format_fichier = \"csv\"\n",
"debut = pd.Timestamp(2024, 1, 1)\n",
"fin = pd.Timestamp(2024, 2, 13)\n",
"\n",
"# Générer le chemin du répertoire de contenu et construire la commande\n",
"content_dir = Path(\"/content\")\n",
"commande = f\"npx dukascopy-node -i {instrument} -from {debut:%Y-%m-%d} -to {fin:%Y-%m-%d} -v {True} -t {type_de_donnees} -f {format_fichier}\"\n",
"\n",
"# Exécution de la commande et capture de la sortie\n",
"try:\n",
" resultat = subprocess.run(commande, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)\n",
" sortie_commande = resultat.stdout\n",
" print(sortie_commande)\n",
"\n",
" # Extraction du nom du fichier CSV et traitement des données\n",
" match = re.search(r\"File saved: (\\S+)\", sortie_commande)\n",
" if match:\n",
" chemin_fichier_csv = match.group(1)\n",
" chemin_fichier_csv = content_dir / chemin_fichier_csv # Avec pathlib, nous assurons l’uniformité du chemin\n",
" print(\"Chemin du fichier CSV:\", chemin_fichier_csv)\n",
"\n",
" # Lecture et traitement du fichier CSV\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'], unit='ms').dt.strftime('%Y-%m-%d %H:%M')\n",
" donnees = donnees.rename(columns={'timestamp': 'timestamp'})\n",
" donnees.to_csv(chemin_fichier_csv, index=False)\n",
" print(f\"Le fichier CSV a été mis à jour avec les timestamps formatés : {chemin_fichier_csv}\")\n",
" print(donnees.head())\n",
" except Exception as e:\n",
" print(\"Erreur lors de la lecture ou de la conversion du fichier CSV:\", e)\n",
" else:\n",
" print(\"Le nom du fichier n’a pas pu être extrait.\")\n",
"except subprocess.CalledProcessError as e:\n",
" print(\"Erreur lors de l’exécution de la commande:\", e)\n"
],
"metadata": {
"id": "9f05Bp4AslcA",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "62390ad0-8f52-4e70-ad36-d8dd23e6f55e"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"npx: installed 30 in 7.213s\n",
"----------------------------------------------------\n",
"Downloading historical price data for:\n",
"----------------------------------------------------\n",
"Instrument: Euro vs US Dollar\n",
"Timeframe: m1\n",
"From date: Jan 1, 2024, 12:00 AM\n",
"To date: Feb 13, 2024, 12:00 AM\n",
"Price type: bid\n",
"Volumes: true\n",
"UTC Offset: 0\n",
"Include flats: false\n",
"Format: csv\n",
"----------------------------------------------------\n",
"----------------------------------------------------\n",
"√ File saved: download/eurusd-m1-bid-2024-01-01-2024-02-13.csv (2.6 MB)\n",
"\n",
"Download time: 14.9s\n",
"\n",
"\n",
"Chemin du fichier CSV: /content/download/eurusd-m1-bid-2024-01-01-2024-02-13.csv\n",
"Le fichier CSV a été mis à jour avec les timestamps formatés : /content/download/eurusd-m1-bid-2024-01-01-2024-02-13.csv\n",
" timestamp open high low close volume\n",
"0 2024-01-01 22:00 1.10427 1.10429 1.10425 1.10429 5.9\n",
"1 2024-01-01 22:01 1.10429 1.10429 1.10429 1.10429 8.1\n",
"2 2024-01-01 22:02 1.10429 1.10429 1.10429 1.10429 9.9\n",
"3 2024-01-01 22:03 1.10429 1.10429 1.10426 1.10426 18.9\n",
"4 2024-01-01 22:04 1.10426 1.10431 1.10424 1.10425 18.0\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" # Chargement du fichier CSV dans un DataFrame pandas\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
"\n",
" # Assurer que la colonne 'timestamp' est au bon format de date-heure\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
"\n",
" # Formatage de la date-heure de la requête pour correspondre au format du DataFrame\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
"\n",
" # Filtrer pour obtenir les données de la minute spécifiée\n",
" info_minute = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(minutes=1))\n",
" ]\n",
"\n",
" # Vérifier si des données ont été trouvées et les retourner\n",
" if not info_minute.empty:\n",
" # On utilise iloc[0] pour obtenir le premier enregistrement correspondant\n",
" return info_minute.iloc[0].to_dict()\n",
" else:\n",
" # Aucune donnée correspondante n’a été trouvée\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n"
],
"metadata": {
"id": "ZiyMguNXIv5M"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"\n",
"date_heure_str = \"2024-01-05 11:59\"\n",
"\n",
"# Utilisation de la fonction pour récupérer les informations\n",
"informations = Data(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage des informations récupérées\n",
"if informations:\n",
" print(f\"Informations pour {date_heure_str}:\")\n",
" print(f\"Open: {informations['open']}\")\n",
" print(f\"High: {informations['high']}\")\n",
" print(f\"Low: {informations['low']}\")\n",
" print(f\"Close: {informations['close']}\")\n",
" print(f\"Volume: {informations['volume']}\")\n",
"else:\n",
" print(\"Aucune information trouvée pour la minute spécifiée.\")\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "MsTWF5PjI5_Z",
"outputId": "54d8af17-5876-4929-bce6-84a1af418ece"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Informations pour 2024-01-05 11:59:\n",
"Open: 1.09132\n",
"High: 1.09143\n",
"Low: 1.09132\n",
"Close: 1.09142\n",
"Volume: 104.55999755859376\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"api_key = 'cnguep9r01qhlsli99igcnguep9r01qhlsli99j0'\n",
"symbol = 'EUR/USD'\n",
"interval = '1min'\n",
"count = 10\n",
"\n",
"url = f'https://finnhub.io/api/v1/forex/candles?symbol={symbol}&resolution={interval}&count={count}'\n",
"headers = {'X-Finnhub-Token': api_key}\n",
"\n",
"response = requests.get(url, headers=headers)\n",
"\n",
"# Imprimez la réponse brute de l'API\n",
"print(response.text)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Nk5vmJ1icz_T",
"outputId": "3d272585-e3c1-4ca7-b736-323660248f5a"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"<!DOCTYPE html>\n",
"<html lang=\"en\">\n",
"<head>\n",
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\"/>\n",
" <meta name=\"author\" content=\"Finnhub.io\"/>\n",
"\n",
" <meta name=\"viewport\" content=\"width=device-width, initial-scale=1, maximum-scale=5\">\n",
" <link rel=\"shortcut icon\"\n",
" href=\"/static/img/webp/finnhub-logo.webp\"/>\n",
"\n",
" \n",
" <script async src=\"https://www.googletagmanager.com/gtag/js?id=UA-64068110-3\"></script>\n",
" <script>\n",
" window.dataLayer = window.dataLayer || [];\n",
"\n",
" function gtag() {\n",
" dataLayer.push(arguments);\n",
" }\n",
"\n",
" gtag('js', new Date());\n",
"\n",
" gtag('config', 'UA-64068110-3');\n",
" </script>\n",
"\n",
" <link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css\">\n",
"\n",
" <script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js\"></script>\n",
" <script src='https://cdnjs.cloudflare.com/ajax/libs/jquery-easing/1.3/jquery.easing.min.js'></script>\n",
"\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.0/umd/popper.min.js\"></script>\n",
" <script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.1.0/js/bootstrap.min.js\"></script>\n",
"\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/react/16.8.0/umd/react.production.min.js\"></script>\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/react-dom/16.8.0/umd/react-dom.production.min.js\"></script>\n",
"\n",
" <link rel=\"stylesheet\" href=\"https://use.fontawesome.com/releases/v6.0.0/css/all.css\">\n",
"\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/waypoints/4.0.1/jquery.waypoints.min.js\"></script>\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/redux/3.3.1/redux.min.js\"></script>\n",
" <script src=\"https://unpkg.com/react-redux@5.0.7/dist/react-redux.min.js\"></script>\n",
" <script type=\"text/javascript\">\n",
"\n",
" var protocol = location.protocol;\n",
" var wsprotocol\n",
" if (protocol === \"https:\") {\n",
" wsprotocol = \"wss:\";\n",
" } else {\n",
" wsprotocol = \"ws:\";\n",
" }\n",
" var slashes = protocol.concat(\"//\");\n",
" var host = slashes.concat(window.location.host);\n",
"\n",
" window.wsUrl = \"\"\n",
" if (window.wsUrl === '') {\n",
" window.wsUrl = window.location.host\n",
" }\n",
"\n",
" window.getCookie = function getcookie(cname) {\n",
" var name = cname + \"=\";\n",
" var decodedCookie = decodeURIComponent(document.cookie);\n",
" var ca = decodedCookie.split(';');\n",
" for (var i = 0; i < ca.length; i++) {\n",
" var c = ca[i];\n",
" while (c.charAt(0) == ' ') {\n",
" c = c.substring(1);\n",
" }\n",
" if (c.indexOf(name) == 0) {\n",
" return c.substring(name.length, c.length);\n",
" }\n",
" }\n",
" return \"\";\n",
" }\n",
"\n",
" window.setCookie = function setcookie(cname, cvalue, exdays) {\n",
" var d = new Date();\n",
" d.setTime(d.getTime() + (exdays * 24 * 60 * 60 * 1000));\n",
" var expires = \"expires=\" + d.toUTCString();\n",
" document.cookie = cname + \"=\" + cvalue + \";\" + expires + \";path=/\";\n",
" }\n",
"\n",
" window.toTitle = function toTitleCase(str) {\n",
" return str.replace(/\\w\\S*/g, function (txt) {\n",
" return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();\n",
" });\n",
" }\n",
"\n",
" window.getUrlParams = function (key) {\n",
" let url = location.href\n",
" var params = {};\n",
" (url + '?').split('?')[1].split('&').forEach(function (pair) {\n",
" pair = (pair + '=').split('=').map(decodeURIComponent);\n",
" if (pair[0].length) {\n",
" params[pair[0]] = pair[1];\n",
" }\n",
" });\n",
" return params[key];\n",
" };\n",
"\n",
" window.stripHtml = function (html) {\n",
" let tmp = document.createElement(\"DIV\");\n",
" tmp.innerHTML = html;\n",
" return tmp.textContent || tmp.innerText || \"\";\n",
" }\n",
"\n",
" window.timeConverter = function tConverter(UNIX_timestamp) {\n",
" var a = new Date(UNIX_timestamp * 1000);\n",
" var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];\n",
" var dates = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']\n",
" var month = months[a.getMonth()];\n",
" var date = a.getDate();\n",
" var time = dates[a.getDay()] + ' ' + month + ' ' + date\n",
" return time;\n",
" }\n",
"\n",
" window.roundToX = function (num, X) {\n",
" return +(Math.round(num + \"e+\" + X) + \"e-\" + X);\n",
" }\n",
" </script>\n",
"\n",
"\n",
" <meta name=\"description\"\n",
" content=\"Finnhub - Free stock API for realtime market data, global company fundamentals, economic data, and alternative data.\"/>\n",
"<meta name=\"keywords\"\n",
" content=\"stock api,realtime,stock,equity,forex,crypto,fundamental data,economic data,alternative data\">\n",
"<meta property=\"og:title\" content=\"Finnhub - Free realtime APIs for stock, forex and cryptocurrency.\">\n",
"<meta property=\"og:type\" content=\"website\">\n",
"<meta property=\"og:url\" content=\"https://finnhub.io/\">\n",
"<meta property=\"og:image\"\n",
" content=\"https://static.finnhub.io/img/finnhub_2020-05-09_20_51/logo/logo-transparent-thumbnail.png\">\n",
"<meta property=\"og:description\"\n",
" content=\"Finnhub - Free APIs for realtime stock, forex, and cryptocurrency. Company fundamentals, Economic data, and Alternative data.\">\n",
"<meta name=\"twitter:card\" content=\"summary\">\n",
"<meta name=\"twitter:title\"\n",
" content=\"Finnhub - Free realtime APIs for stock, forex and cryptocurrency.\">\n",
"<meta name=\"twitter:image\"\n",
" content=\"https://static.finnhub.io/img/finnhub_2020-05-09_20_51/logo/logo-transparent-thumbnail.png\">\n",
"<meta name=\"twitter:description\"\n",
" content=\"Finnhub - Free APIs for realtime stock, forex, and cryptocurrency. Company fundamentals, Economic data, and Alternative data.\">\n",
"<meta charset=\"UTF-8\">\n",
"\n",
" <title>Finnhub Stock APIs - Real-time stock prices, Company fundamentals, Estimates, and Alternative data.</title>\n",
"\n",
" <script type=\"text/javascript\">\n",
" window.serverData = {\"announcement\":\"Global Filings Search is now available on Finnhub. Check out \\u003ca href=\\\"/docs/api/global-filings-search\\\"\\u003ehere\\u003c/a\\u003e\"}\n",
" window.login = false \n",
" window.isSubscribe = null \n",
" </script>\n",
"</head>\n",
"<body>\n",
"<div id=\"root\"></div>\n",
"<script type=\"text/javascript\" src=\"/static/js/webpack/dist/main-index.bundle.357882d13b93982f42b7.js\"></script></body>\n",
"</html>\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"### Après le fichier\n",
"\n",
"Maintenant que l'on a récupéré ce petit csv de merde\n",
"- 30 dernière minute : open + high + low + volume = 150 (on commence de la plus recent a la plus ancienne)\n",
"- 30 dernière : pareil = 150\n",
"- 30 dernier jours : pareil = 150\n",
"- 7 jour de la semaine = 7 (de lundi a dimanche)\n",
"- 31 jour du mois = 31\n",
"- 12 mois de l'anéee = 12\n"
],
"metadata": {
"id": "eOP-2yDs6cBk"
}
},
{
"cell_type": "markdown",
"source": [
"# Liste pour les 30 derniers minutes"
],
"metadata": {
"id": "76Amy6xU7YuB"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée dans la question initiale.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" # Chargement du fichier CSV dans un DataFrame pandas.\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
"\n",
" # Assurer que la colonne timestamp est au bon format de date-heure.\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
"\n",
" # Formatage de la date-heure de la requête pour correspondre au format du DataFrame.\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
"\n",
" # Filtrer pour obtenir les données de la minute spécifiée.\n",
" info_minute = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(minutes=1))\n",
" ]\n",
"\n",
" # Vérifier si des données ont été trouvées et les retourner.\n",
" if not info_minute.empty:\n",
" # On utilise iloc[0] pour obtenir le premier enregistrement correspondant.\n",
" return info_minute.iloc[0].to_dict()\n",
" else:\n",
" # Aucune donnée correspondante n’a été trouvée.\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"# Fonction pour lister les données sur les 30 dernières minutes.\n",
"def liste_donnees_30min(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" # Convertir la date_heure_str en objet datetime.\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
" # Boucle pour récupérer les données minute par minute de la plus récente à la plus vieille.\n",
" for i in range(31):\n",
" # Le point de départ pour chaque minute.\n",
" minute_debut = date_heure_finale - pd.Timedelta(minutes=i)\n",
" # Convertir en chaîne de caractères ISO pour l’appel de fonction.\n",
" minute_debut_str = minute_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" # Utiliser la fonction Data pour obtenir les données.\n",
" donnee_minute = Data(chemin_fichier_csv, minute_debut_str)\n",
" # Si des données sont trouvées, les ajouter à la liste.\n",
" if donnee_minute:\n",
" liste_donnees.append(donnee_minute)\n",
"\n",
" # Renvoyer la liste complète des données des 30 dernières minutes.\n",
" return liste_donnees\n",
"\n",
"# Exemple d’utilisation.\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure au format correspondant à vos données.\n",
"\n",
"# Appel de la fonction.\n",
"donnees_30_dernieres_minutes = liste_donnees_30min(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage des résultats.\n",
"for donnee in donnees_30_dernieres_minutes:\n",
" print(donnee)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "nFmUPW6NN00u",
"outputId": "5b1ab0c1-502d-4dff-9d43-ccf20d822d23"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"{'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 1.08173, 'close': 1.08174, 'volume': 240.8699951171875}\n",
"{'timestamp': Timestamp('2024-02-01 12:44:00'), 'open': 1.08143, 'high': 1.08178, 'low': 1.08142, 'close': 1.08178, 'volume': 524.3599853515625}\n",
"{'timestamp': Timestamp('2024-02-01 12:43:00'), 'open': 1.08143, 'high': 1.0815, 'low': 1.08139, 'close': 1.08142, 'volume': 476.7699890136719}\n",
"{'timestamp': Timestamp('2024-02-01 12:42:00'), 'open': 1.08155, 'high': 1.08157, 'low': 1.08142, 'close': 1.08145, 'volume': 553.02001953125}\n",
"{'timestamp': Timestamp('2024-02-01 12:41:00'), 'open': 1.08146, 'high': 1.08152, 'low': 1.08141, 'close': 1.08151, 'volume': 171.00999450683594}\n",
"{'timestamp': Timestamp('2024-02-01 12:40:00'), 'open': 1.0813, 'high': 1.08147, 'low': 1.08125, 'close': 1.08147, 'volume': 460.5700073242188}\n",
"{'timestamp': Timestamp('2024-02-01 12:39:00'), 'open': 1.08118, 'high': 1.08131, 'low': 1.08116, 'close': 1.08131, 'volume': 339.1099853515625}\n",
"{'timestamp': Timestamp('2024-02-01 12:38:00'), 'open': 1.08107, 'high': 1.08118, 'low': 1.08107, 'close': 1.08117, 'volume': 337.29998779296875}\n",
"{'timestamp': Timestamp('2024-02-01 12:37:00'), 'open': 1.08111, 'high': 1.08126, 'low': 1.08106, 'close': 1.08106, 'volume': 223.5}\n",
"{'timestamp': Timestamp('2024-02-01 12:36:00'), 'open': 1.08101, 'high': 1.08117, 'low': 1.081, 'close': 1.08111, 'volume': 184.509994506836}\n",
"{'timestamp': Timestamp('2024-02-01 12:35:00'), 'open': 1.08092, 'high': 1.08104, 'low': 1.08078, 'close': 1.08104, 'volume': 401.8900146484375}\n",
"{'timestamp': Timestamp('2024-02-01 12:34:00'), 'open': 1.08102, 'high': 1.08106, 'low': 1.08093, 'close': 1.08093, 'volume': 187.9900054931641}\n",
"{'timestamp': Timestamp('2024-02-01 12:33:00'), 'open': 1.08118, 'high': 1.08119, 'low': 1.08096, 'close': 1.08102, 'volume': 492.9599914550781}\n",
"{'timestamp': Timestamp('2024-02-01 12:32:00'), 'open': 1.08124, 'high': 1.08126, 'low': 1.08097, 'close': 1.08117, 'volume': 396.239990234375}\n",
"{'timestamp': Timestamp('2024-02-01 12:31:00'), 'open': 1.08153, 'high': 1.08155, 'low': 1.08122, 'close': 1.08123, 'volume': 237.1199951171875}\n",
"{'timestamp': Timestamp('2024-02-01 12:30:00'), 'open': 1.08145, 'high': 1.08159, 'low': 1.08144, 'close': 1.08153, 'volume': 235.19000244140625}\n",
"{'timestamp': Timestamp('2024-02-01 12:29:00'), 'open': 1.08128, 'high': 1.08144, 'low': 1.08126, 'close': 1.08144, 'volume': 298.2900085449219}\n",
"{'timestamp': Timestamp('2024-02-01 12:28:00'), 'open': 1.08111, 'high': 1.0813, 'low': 1.08109, 'close': 1.08128, 'volume': 151.35000610351562}\n",
"{'timestamp': Timestamp('2024-02-01 12:27:00'), 'open': 1.08116, 'high': 1.08119, 'low': 1.08111, 'close': 1.08112, 'volume': 168.67999267578125}\n",
"{'timestamp': Timestamp('2024-02-01 12:26:00'), 'open': 1.08121, 'high': 1.08122, 'low': 1.08116, 'close': 1.08117, 'volume': 314.760009765625}\n",
"{'timestamp': Timestamp('2024-02-01 12:25:00'), 'open': 1.08108, 'high': 1.08131, 'low': 1.08108, 'close': 1.08122, 'volume': 277.32000732421875}\n",
"{'timestamp': Timestamp('2024-02-01 12:24:00'), 'open': 1.08122, 'high': 1.08132, 'low': 1.08107, 'close': 1.08109, 'volume': 328.8699951171875}\n",
"{'timestamp': Timestamp('2024-02-01 12:23:00'), 'open': 1.08118, 'high': 1.08129, 'low': 1.08117, 'close': 1.08121, 'volume': 426.1099853515625}\n",
"{'timestamp': Timestamp('2024-02-01 12:22:00'), 'open': 1.08124, 'high': 1.08128, 'low': 1.08116, 'close': 1.08117, 'volume': 388.7900085449219}\n",
"{'timestamp': Timestamp('2024-02-01 12:21:00'), 'open': 1.08123, 'high': 1.08134, 'low': 1.08121, 'close': 1.08123, 'volume': 447.4100036621094}\n",
"{'timestamp': Timestamp('2024-02-01 12:20:00'), 'open': 1.08167, 'high': 1.08167, 'low': 1.08122, 'close': 1.08122, 'volume': 224.88999938964844}\n",
"{'timestamp': Timestamp('2024-02-01 12:19:00'), 'open': 1.08143, 'high': 1.0817, 'low': 1.08143, 'close': 1.08168, 'volume': 90.05000305175781}\n",
"{'timestamp': Timestamp('2024-02-01 12:18:00'), 'open': 1.08117, 'high': 1.08143, 'low': 1.08113, 'close': 1.08143, 'volume': 273.9599914550781}\n",
"{'timestamp': Timestamp('2024-02-01 12:17:00'), 'open': 1.08108, 'high': 1.08119, 'low': 1.08108, 'close': 1.08117, 'volume': 269.0400085449219}\n",
"{'timestamp': Timestamp('2024-02-01 12:16:00'), 'open': 1.08123, 'high': 1.08127, 'low': 1.08106, 'close': 1.08108, 'volume': 401.4700012207031}\n",
"{'timestamp': Timestamp('2024-02-01 12:15:00'), 'open': 1.08122, 'high': 1.08138, 'low': 1.08118, 'close': 1.08124, 'volume': 482.1799926757813}\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 30 derniers jours"
],
"metadata": {
"id": "7YxteF4kN32H"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"# Fonction pour lister les données sur les 7 derniers jours.\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
" # Nous parcourons maintenant seulement 7 jours, donc range(7)\n",
" for i in range(30, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_30_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 7 derniers jours :\")\n",
"print(donnees_30_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "VwDg9tVROFQK",
"outputId": "9202db03-19d2-4f5b-b84c-87f6e6fd3e35"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 7 derniers jours :\n",
"[{'timestamp': Timestamp('2024-01-02 12:45:00'), 'open': 1.09608, 'high': 1.09619, 'low': 1.09607, 'close': 1.09615, 'volume': 192.1300048828125}, {'timestamp': Timestamp('2024-01-03 12:45:00'), 'open': 1.09205, 'high': 1.09217, 'low': 1.09192, 'close': 1.09208, 'volume': 380.6900024414063}, {'timestamp': Timestamp('2024-01-04 12:45:00'), 'open': 1.0955, 'high': 1.0955, 'low': 1.0952, 'close': 1.09534, 'volume': 107.33999633789062}, {'timestamp': Timestamp('2024-01-05 12:45:00'), 'open': 1.09184, 'high': 1.09196, 'low': 1.09182, 'close': 1.09195, 'volume': 105.77999877929688}, {'timestamp': Timestamp('2024-01-07 22:04:00'), 'open': 1.09386, 'high': 1.0939, 'low': 1.09376, 'close': 1.0939, 'volume': 28.600000381469727}, {'timestamp': Timestamp('2024-01-08 12:45:00'), 'open': 1.0942, 'high': 1.09436, 'low': 1.0942, 'close': 1.0943, 'volume': 109.26000213623048}, {'timestamp': Timestamp('2024-01-09 12:45:00'), 'open': 1.09324, 'high': 1.09341, 'low': 1.09319, 'close': 1.09338, 'volume': 175.22999572753906}, {'timestamp': Timestamp('2024-01-10 12:45:00'), 'open': 1.09447, 'high': 1.09448, 'low': 1.09439, 'close': 1.09442, 'volume': 248.8500061035156}, {'timestamp': Timestamp('2024-01-11 12:45:00'), 'open': 1.09844, 'high': 1.09862, 'low': 1.09841, 'close': 1.0986, 'volume': 214.6000061035156}, {'timestamp': Timestamp('2024-01-12 12:45:00'), 'open': 1.09432, 'high': 1.09432, 'low': 1.09412, 'close': 1.09416, 'volume': 160.11000061035156}, {'timestamp': Timestamp('2024-01-14 22:00:00'), 'open': 1.0948, 'high': 1.09484, 'low': 1.09476, 'close': 1.09476, 'volume': 10.399999618530272}, {'timestamp': Timestamp('2024-01-15 12:45:00'), 'open': 1.09546, 'high': 1.0956, 'low': 1.09541, 'close': 1.09541, 'volume': 139.64999389648438}, {'timestamp': Timestamp('2024-01-16 12:45:00'), 'open': 1.0885, 'high': 1.08852, 'low': 1.08844, 'close': 1.08846, 'volume': 153.57000732421875}, {'timestamp': Timestamp('2024-01-17 12:45:00'), 'open': 1.0872, 'high': 1.0872, 'low': 1.0871, 
'close': 1.08715, 'volume': 216.75}, {'timestamp': Timestamp('2024-01-18 12:45:00'), 'open': 1.08786, 'high': 1.08798, 'low': 1.08786, 'close': 1.08798, 'volume': 135.25999450683594}, {'timestamp': Timestamp('2024-01-19 12:45:00'), 'open': 1.08825, 'high': 1.08825, 'low': 1.08815, 'close': 1.08816, 'volume': 176.00999450683594}, {'timestamp': Timestamp('2024-01-21 22:00:00'), 'open': 1.08906, 'high': 1.08918, 'low': 1.08905, 'close': 1.08909, 'volume': 5.5}, {'timestamp': Timestamp('2024-01-22 12:45:00'), 'open': 1.08977, 'high': 1.08981, 'low': 1.08958, 'close': 1.08962, 'volume': 271.3900146484375}, {'timestamp': Timestamp('2024-01-23 12:45:00'), 'open': 1.08625, 'high': 1.08627, 'low': 1.08616, 'close': 1.08621, 'volume': 221.3300018310547}, {'timestamp': Timestamp('2024-01-24 12:45:00'), 'open': 1.08952, 'high': 1.08956, 'low': 1.0895, 'close': 1.08954, 'volume': 150.52999877929688}, {'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 7 derniers jours"
],
"metadata": {
"id": "HeT4Hg1pP0H1"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"# Fonction pour lister les données sur les 7 derniers jours.\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
" # Nous parcourons maintenant seulement 7 jours, donc range(7)\n",
" for i in range(7, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_7_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 7 derniers jours :\")\n",
"print(donnees_7_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "C9t40s2GP4PW",
"outputId": "0aa142c2-ec45-439e-a342-e4be3c7da950"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 7 derniers jours :\n",
"[{'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 31 derniers jours"
],
"metadata": {
"id": "Vi2YecL_SBAW"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
"\n",
" for i in range(31, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_31_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 31 derniers jours :\")\n",
"print(donnees_31_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "4mA8CQjNSFQA",
"outputId": "0d728b04-ab3a-4a02-d7c4-d7364527ffdc"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 31 derniers jours :\n",
"[{'timestamp': Timestamp('2024-01-01 22:00:00'), 'open': 1.10427, 'high': 1.10429, 'low': 1.10425, 'close': 1.10429, 'volume': 5.900000095367432}, {'timestamp': Timestamp('2024-01-02 12:45:00'), 'open': 1.09608, 'high': 1.09619, 'low': 1.09607, 'close': 1.09615, 'volume': 192.1300048828125}, {'timestamp': Timestamp('2024-01-03 12:45:00'), 'open': 1.09205, 'high': 1.09217, 'low': 1.09192, 'close': 1.09208, 'volume': 380.6900024414063}, {'timestamp': Timestamp('2024-01-04 12:45:00'), 'open': 1.0955, 'high': 1.0955, 'low': 1.0952, 'close': 1.09534, 'volume': 107.33999633789062}, {'timestamp': Timestamp('2024-01-05 12:45:00'), 'open': 1.09184, 'high': 1.09196, 'low': 1.09182, 'close': 1.09195, 'volume': 105.77999877929688}, {'timestamp': Timestamp('2024-01-07 22:04:00'), 'open': 1.09386, 'high': 1.0939, 'low': 1.09376, 'close': 1.0939, 'volume': 28.600000381469727}, {'timestamp': Timestamp('2024-01-08 12:45:00'), 'open': 1.0942, 'high': 1.09436, 'low': 1.0942, 'close': 1.0943, 'volume': 109.26000213623048}, {'timestamp': Timestamp('2024-01-09 12:45:00'), 'open': 1.09324, 'high': 1.09341, 'low': 1.09319, 'close': 1.09338, 'volume': 175.22999572753906}, {'timestamp': Timestamp('2024-01-10 12:45:00'), 'open': 1.09447, 'high': 1.09448, 'low': 1.09439, 'close': 1.09442, 'volume': 248.8500061035156}, {'timestamp': Timestamp('2024-01-11 12:45:00'), 'open': 1.09844, 'high': 1.09862, 'low': 1.09841, 'close': 1.0986, 'volume': 214.6000061035156}, {'timestamp': Timestamp('2024-01-12 12:45:00'), 'open': 1.09432, 'high': 1.09432, 'low': 1.09412, 'close': 1.09416, 'volume': 160.11000061035156}, {'timestamp': Timestamp('2024-01-14 22:00:00'), 'open': 1.0948, 'high': 1.09484, 'low': 1.09476, 'close': 1.09476, 'volume': 10.399999618530272}, {'timestamp': Timestamp('2024-01-15 12:45:00'), 'open': 1.09546, 'high': 1.0956, 'low': 1.09541, 'close': 1.09541, 'volume': 139.64999389648438}, {'timestamp': Timestamp('2024-01-16 12:45:00'), 'open': 1.0885, 'high': 1.08852, 'low': 1.08844, 
'close': 1.08846, 'volume': 153.57000732421875}, {'timestamp': Timestamp('2024-01-17 12:45:00'), 'open': 1.0872, 'high': 1.0872, 'low': 1.0871, 'close': 1.08715, 'volume': 216.75}, {'timestamp': Timestamp('2024-01-18 12:45:00'), 'open': 1.08786, 'high': 1.08798, 'low': 1.08786, 'close': 1.08798, 'volume': 135.25999450683594}, {'timestamp': Timestamp('2024-01-19 12:45:00'), 'open': 1.08825, 'high': 1.08825, 'low': 1.08815, 'close': 1.08816, 'volume': 176.00999450683594}, {'timestamp': Timestamp('2024-01-21 22:00:00'), 'open': 1.08906, 'high': 1.08918, 'low': 1.08905, 'close': 1.08909, 'volume': 5.5}, {'timestamp': Timestamp('2024-01-22 12:45:00'), 'open': 1.08977, 'high': 1.08981, 'low': 1.08958, 'close': 1.08962, 'volume': 271.3900146484375}, {'timestamp': Timestamp('2024-01-23 12:45:00'), 'open': 1.08625, 'high': 1.08627, 'low': 1.08616, 'close': 1.08621, 'volume': 221.3300018310547}, {'timestamp': Timestamp('2024-01-24 12:45:00'), 'open': 1.08952, 'high': 1.08956, 'low': 1.0895, 'close': 1.08954, 'volume': 150.52999877929688}, {'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 
1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"12 dernier mois"
],
"metadata": {
"id": "APxPvhVTT2OZ"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
"\n",
" for i in range(365, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_365_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 12 derniers mois :\")\n",
"print(donnees_365_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "hQ1WARdpT6c8",
"outputId": "778ba183-29d3-405d-b6a8-6c01620da107"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 12 derniers mois :\n",
"[{'timestamp': Timestamp('2024-01-01 22:00:00'), 'open': 1.10427, 'high': 1.10429, 'low': 1.10425, 'close': 1.10429, 'volume': 5.900000095367432}, {'timestamp': Timestamp('2024-01-02 12:45:00'), 'open': 1.09608, 'high': 1.09619, 'low': 1.09607, 'close': 1.09615, 'volume': 192.1300048828125}, {'timestamp': Timestamp('2024-01-03 12:45:00'), 'open': 1.09205, 'high': 1.09217, 'low': 1.09192, 'close': 1.09208, 'volume': 380.6900024414063}, {'timestamp': Timestamp('2024-01-04 12:45:00'), 'open': 1.0955, 'high': 1.0955, 'low': 1.0952, 'close': 1.09534, 'volume': 107.33999633789062}, {'timestamp': Timestamp('2024-01-05 12:45:00'), 'open': 1.09184, 'high': 1.09196, 'low': 1.09182, 'close': 1.09195, 'volume': 105.77999877929688}, {'timestamp': Timestamp('2024-01-07 22:04:00'), 'open': 1.09386, 'high': 1.0939, 'low': 1.09376, 'close': 1.0939, 'volume': 28.600000381469727}, {'timestamp': Timestamp('2024-01-08 12:45:00'), 'open': 1.0942, 'high': 1.09436, 'low': 1.0942, 'close': 1.0943, 'volume': 109.26000213623048}, {'timestamp': Timestamp('2024-01-09 12:45:00'), 'open': 1.09324, 'high': 1.09341, 'low': 1.09319, 'close': 1.09338, 'volume': 175.22999572753906}, {'timestamp': Timestamp('2024-01-10 12:45:00'), 'open': 1.09447, 'high': 1.09448, 'low': 1.09439, 'close': 1.09442, 'volume': 248.8500061035156}, {'timestamp': Timestamp('2024-01-11 12:45:00'), 'open': 1.09844, 'high': 1.09862, 'low': 1.09841, 'close': 1.0986, 'volume': 214.6000061035156}, {'timestamp': Timestamp('2024-01-12 12:45:00'), 'open': 1.09432, 'high': 1.09432, 'low': 1.09412, 'close': 1.09416, 'volume': 160.11000061035156}, {'timestamp': Timestamp('2024-01-14 22:00:00'), 'open': 1.0948, 'high': 1.09484, 'low': 1.09476, 'close': 1.09476, 'volume': 10.399999618530272}, {'timestamp': Timestamp('2024-01-15 12:45:00'), 'open': 1.09546, 'high': 1.0956, 'low': 1.09541, 'close': 1.09541, 'volume': 139.64999389648438}, {'timestamp': Timestamp('2024-01-16 12:45:00'), 'open': 1.0885, 'high': 1.08852, 'low': 1.08844, 
'close': 1.08846, 'volume': 153.57000732421875}, {'timestamp': Timestamp('2024-01-17 12:45:00'), 'open': 1.0872, 'high': 1.0872, 'low': 1.0871, 'close': 1.08715, 'volume': 216.75}, {'timestamp': Timestamp('2024-01-18 12:45:00'), 'open': 1.08786, 'high': 1.08798, 'low': 1.08786, 'close': 1.08798, 'volume': 135.25999450683594}, {'timestamp': Timestamp('2024-01-19 12:45:00'), 'open': 1.08825, 'high': 1.08825, 'low': 1.08815, 'close': 1.08816, 'volume': 176.00999450683594}, {'timestamp': Timestamp('2024-01-21 22:00:00'), 'open': 1.08906, 'high': 1.08918, 'low': 1.08905, 'close': 1.08909, 'volume': 5.5}, {'timestamp': Timestamp('2024-01-22 12:45:00'), 'open': 1.08977, 'high': 1.08981, 'low': 1.08958, 'close': 1.08962, 'volume': 271.3900146484375}, {'timestamp': Timestamp('2024-01-23 12:45:00'), 'open': 1.08625, 'high': 1.08627, 'low': 1.08616, 'close': 1.08621, 'volume': 221.3300018310547}, {'timestamp': Timestamp('2024-01-24 12:45:00'), 'open': 1.08952, 'high': 1.08956, 'low': 1.0895, 'close': 1.08954, 'volume': 150.52999877929688}, {'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 
1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# ducoup voici ce que nous récupéront les quoicoupote"
],
"metadata": {
"id": "m2vVLZQEUW8P"
}
},
{
"cell_type": "code",
"source": [
"donnees_365_derniers_jours_liste\n",
"donnees_31_derniers_jours_liste\n",
"donnees_7_derniers_jours_liste\n",
"donnees_30_derniers_jours_liste\n",
"donnees_30_dernieres_minutes"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 193
},
"id": "G3RqMOdVUbdG",
"outputId": "d566ad25-4667-42bd-f882-2b5fbbe4ab5a"
},
"execution_count": null,
"outputs": [
{
"output_type": "error",
"ename": "NameError",
"evalue": "name 'donnees_30_derniers_jours' is not defined",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-14-7bb42c6ad7cd>\u001b[0m in \u001b[0;36m<cell line: 4>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mdonnees_31_derniers_jours_liste\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mdonnees_7_derniers_jours_liste\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mdonnees_30_derniers_jours\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0mdonnees_30_dernieres_minutes\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mNameError\u001b[0m: name 'donnees_30_derniers_jours' is not defined"
]
}
]
}
]
}
|
9d70adcb6b85e6fb8ede667b8aa219ab
|
{
"intermediate": 0.3493768870830536,
"beginner": 0.2688944339752197,
"expert": 0.3817286193370819
}
|
40,965
|
opptimise ce code google colab ipynb en évitant les répition de fonction par exemple et en faisant que ce sois le plus clair possible a lire voici le code :
{
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"provenance": []
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
},
"language_info": {
"name": "python"
}
},
"cells": [
{
"cell_type": "markdown",
"source": [
"### Bienvenue dans mon projet pour conquerir le monde\n",
"\n",
"\n",
"---Nos objectif \\\n",
"Devenir riche \\\n",
"Conquerire le monde \\\n",
"Devenir immortel \\\n",
"\n",
"---Plan \\\n",
"créé un ia de trading pour avoir les première liquidité \\\n",
"la revendre\n",
"\n",
"\n",
"--- Ce qui nous empèche de conquèrir le monde : \\\n",
"les heures d'hiver et d'automne0"
],
"metadata": {
"id": "qnQm2vcsgDsv"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"import requests\n",
"import re\n",
"import subprocess\n",
"from datetime import datetime, timedelta\n",
"from pathlib import Path\n"
],
"metadata": {
"id": "zOseKr_vSvoa"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "markdown",
"source": [
"| Option | Alias | Default | Description |\n",
"| ------ | ----- | ----- | ----- |\n",
"| --instrument | -i | | Trading instrument id. View list |\n",
"| --date-from | -from | | From date (yyyy-mm-dd) |\n",
"| --date-to | -to | now | To date (yyyy-mm-dd or 'now') |\n",
"| --timeframe | -t | d1 | Timeframe aggregation (tick, s1, m1, m5, m15, m30, h1, h4, d1, mn1) |\n",
"| --price-type | -p | bid | Price type: (bid, ask) |\n",
"| --utc-offset | -utc | 0 | UTC offset in minutes |\n",
"| --volumes | -v | false | Include volumes |\n",
"| --volume-units | -vu | millions | Volume units (millions, thousands, units) |\n",
"| --flats | -fl | false | Include flats (0 volumes) |\n",
"| --format | -f | json | Output format (csv, json, array). View output examples |\n",
"| --directory | -dir | ./download | Download directory |\n",
"| --batch-size | -bs | 10 | Batch size of downloaded artifacts |\n",
"| --batch-pause | -bp | 1000 | Pause between batches in ms |\n",
"| --cache | -ch | false | Use cache |\n",
"| --cache-path | -chpath | ./dukascopy-cache | Folder path for cache data |\n",
"| --retries | -r | 0 | Number of retries for a failed artifact download |\n",
"| --retry-on-empty | -re | false | A flag indicating whether requests with successful but empty (0 Bytes) responses should be retried. If retries is 0, this parameter will be ignored |\n",
"| --no-fail-after-retries | -fr | false | A flag indicating whether the process should fail after all retries have been exhausted. If retries is 0, this parameter will be ignored |\n",
"| --retry-pause | -rp | 500 | Pause between retries in milliseconds |\n",
"| --debug | -d | false | Output extra debugging |\n",
"| --silent | -s | false | Hides the search config in the CLI output |\n",
"| --inline | -in | false | Makes files smaller in size by removing new lines in the output (works only with json and array formats) |\n",
"| --file-name | -fn | | Custom file name for the generated file |\n",
"| --help | -h | | display help for command |"
],
"metadata": {
"id": "F7iDYaeVQYh1"
}
},
{
"cell_type": "code",
"source": [
"# Paramètres de la commande\n",
"instrument = \"eurusd\"\n",
"type_de_donnees = \"m1\"\n",
"format_fichier = \"csv\"\n",
"debut = pd.Timestamp(2024, 1, 1)\n",
"fin = pd.Timestamp(2024, 2, 13)\n",
"\n",
"# Générer le chemin du répertoire de contenu et construire la commande\n",
"content_dir = Path(\"/content\")\n",
"commande = f\"npx dukascopy-node -i {instrument} -from {debut:%Y-%m-%d} -to {fin:%Y-%m-%d} -v {True} -t {type_de_donnees} -f {format_fichier}\"\n",
"\n",
"# Exécution de la commande et capture de la sortie\n",
"try:\n",
" resultat = subprocess.run(commande, shell=True, check=True, stdout=subprocess.PIPE, stderr=subprocess.STDOUT, text=True)\n",
" sortie_commande = resultat.stdout\n",
" print(sortie_commande)\n",
"\n",
" # Extraction du nom du fichier CSV et traitement des données\n",
" match = re.search(r\"File saved: (\\S+)\", sortie_commande)\n",
" if match:\n",
" chemin_fichier_csv = match.group(1)\n",
" chemin_fichier_csv = content_dir / chemin_fichier_csv # Avec pathlib, nous assurons l’uniformité du chemin\n",
" print(\"Chemin du fichier CSV:\", chemin_fichier_csv)\n",
"\n",
" # Lecture et traitement du fichier CSV\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'], unit='ms').dt.strftime('%Y-%m-%d %H:%M')\n",
" donnees = donnees.rename(columns={'timestamp': 'timestamp'})\n",
" donnees.to_csv(chemin_fichier_csv, index=False)\n",
" print(f\"Le fichier CSV a été mis à jour avec les timestamps formatés : {chemin_fichier_csv}\")\n",
" print(donnees.head())\n",
" except Exception as e:\n",
" print(\"Erreur lors de la lecture ou de la conversion du fichier CSV:\", e)\n",
" else:\n",
" print(\"Le nom du fichier n’a pas pu être extrait.\")\n",
"except subprocess.CalledProcessError as e:\n",
" print(\"Erreur lors de l’exécution de la commande:\", e)\n"
],
"metadata": {
"id": "9f05Bp4AslcA",
"colab": {
"base_uri": "https://localhost:8080/"
},
"outputId": "62390ad0-8f52-4e70-ad36-d8dd23e6f55e"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"npx: installed 30 in 7.213s\n",
"----------------------------------------------------\n",
"Downloading historical price data for:\n",
"----------------------------------------------------\n",
"Instrument: Euro vs US Dollar\n",
"Timeframe: m1\n",
"From date: Jan 1, 2024, 12:00 AM\n",
"To date: Feb 13, 2024, 12:00 AM\n",
"Price type: bid\n",
"Volumes: true\n",
"UTC Offset: 0\n",
"Include flats: false\n",
"Format: csv\n",
"----------------------------------------------------\n",
"----------------------------------------------------\n",
"√ File saved: download/eurusd-m1-bid-2024-01-01-2024-02-13.csv (2.6 MB)\n",
"\n",
"Download time: 14.9s\n",
"\n",
"\n",
"Chemin du fichier CSV: /content/download/eurusd-m1-bid-2024-01-01-2024-02-13.csv\n",
"Le fichier CSV a été mis à jour avec les timestamps formatés : /content/download/eurusd-m1-bid-2024-01-01-2024-02-13.csv\n",
" timestamp open high low close volume\n",
"0 2024-01-01 22:00 1.10427 1.10429 1.10425 1.10429 5.9\n",
"1 2024-01-01 22:01 1.10429 1.10429 1.10429 1.10429 8.1\n",
"2 2024-01-01 22:02 1.10429 1.10429 1.10429 1.10429 9.9\n",
"3 2024-01-01 22:03 1.10429 1.10429 1.10426 1.10426 18.9\n",
"4 2024-01-01 22:04 1.10426 1.10431 1.10424 1.10425 18.0\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" # Chargement du fichier CSV dans un DataFrame pandas\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
"\n",
" # Assurer que la colonne 'timestamp' est au bon format de date-heure\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
"\n",
" # Formatage de la date-heure de la requête pour correspondre au format du DataFrame\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
"\n",
" # Filtrer pour obtenir les données de la minute spécifiée\n",
" info_minute = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(minutes=1))\n",
" ]\n",
"\n",
" # Vérifier si des données ont été trouvées et les retourner\n",
" if not info_minute.empty:\n",
" # On utilise iloc[0] pour obtenir le premier enregistrement correspondant\n",
" return info_minute.iloc[0].to_dict()\n",
" else:\n",
" # Aucune donnée correspondante n’a été trouvée\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n"
],
"metadata": {
"id": "ZiyMguNXIv5M"
},
"execution_count": null,
"outputs": []
},
{
"cell_type": "code",
"source": [
"\n",
"date_heure_str = \"2024-01-05 11:59\"\n",
"\n",
"# Utilisation de la fonction pour récupérer les informations\n",
"informations = Data(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage des informations récupérées\n",
"if informations:\n",
" print(f\"Informations pour {date_heure_str}:\")\n",
" print(f\"Open: {informations['open']}\")\n",
" print(f\"High: {informations['high']}\")\n",
" print(f\"Low: {informations['low']}\")\n",
" print(f\"Close: {informations['close']}\")\n",
" print(f\"Volume: {informations['volume']}\")\n",
"else:\n",
" print(\"Aucune information trouvée pour la minute spécifiée.\")\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "MsTWF5PjI5_Z",
"outputId": "54d8af17-5876-4929-bce6-84a1af418ece"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Informations pour 2024-01-05 11:59:\n",
"Open: 1.09132\n",
"High: 1.09143\n",
"Low: 1.09132\n",
"Close: 1.09142\n",
"Volume: 104.55999755859376\n"
]
}
]
},
{
"cell_type": "code",
"source": [
"api_key = 'cnguep9r01qhlsli99igcnguep9r01qhlsli99j0'\n",
"symbol = 'EUR/USD'\n",
"interval = '1min'\n",
"count = 10\n",
"\n",
"url = f'https://finnhub.io/api/v1/forex/candles?symbol={symbol}&resolution={interval}&count={count}'\n",
"headers = {'X-Finnhub-Token': api_key}\n",
"\n",
"response = requests.get(url, headers=headers)\n",
"\n",
"# Imprimez la réponse brute de l'API\n",
"print(response.text)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "Nk5vmJ1icz_T",
"outputId": "3d272585-e3c1-4ca7-b736-323660248f5a"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"<!DOCTYPE html>\n",
"<html lang=\"en\">\n",
"<head>\n",
" <meta http-equiv=\"Content-Type\" content=\"text/html; charset=UTF-8\"/>\n",
" <meta name=\"author\" content=\"Finnhub.io\"/>\n",
"\n",
" <meta name=\"viewport\" content=\"width=device-width, initial-scale=1, maximum-scale=5\">\n",
" <link rel=\"shortcut icon\"\n",
" href=\"/static/img/webp/finnhub-logo.webp\"/>\n",
"\n",
" \n",
" <script async src=\"https://www.googletagmanager.com/gtag/js?id=UA-64068110-3\"></script>\n",
" <script>\n",
" window.dataLayer = window.dataLayer || [];\n",
"\n",
" function gtag() {\n",
" dataLayer.push(arguments);\n",
" }\n",
"\n",
" gtag('js', new Date());\n",
"\n",
" gtag('config', 'UA-64068110-3');\n",
" </script>\n",
"\n",
" <link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.1.0/css/bootstrap.min.css\">\n",
"\n",
" <script src=\"https://ajax.googleapis.com/ajax/libs/jquery/3.3.1/jquery.min.js\"></script>\n",
" <script src='https://cdnjs.cloudflare.com/ajax/libs/jquery-easing/1.3/jquery.easing.min.js'></script>\n",
"\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/popper.js/1.14.0/umd/popper.min.js\"></script>\n",
" <script src=\"https://maxcdn.bootstrapcdn.com/bootstrap/4.1.0/js/bootstrap.min.js\"></script>\n",
"\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/react/16.8.0/umd/react.production.min.js\"></script>\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/react-dom/16.8.0/umd/react-dom.production.min.js\"></script>\n",
"\n",
" <link rel=\"stylesheet\" href=\"https://use.fontawesome.com/releases/v6.0.0/css/all.css\">\n",
"\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/waypoints/4.0.1/jquery.waypoints.min.js\"></script>\n",
" <script src=\"https://cdnjs.cloudflare.com/ajax/libs/redux/3.3.1/redux.min.js\"></script>\n",
" <script src=\"https://unpkg.com/react-redux@5.0.7/dist/react-redux.min.js\"></script>\n",
" <script type=\"text/javascript\">\n",
"\n",
" var protocol = location.protocol;\n",
" var wsprotocol\n",
" if (protocol === \"https:\") {\n",
" wsprotocol = \"wss:\";\n",
" } else {\n",
" wsprotocol = \"ws:\";\n",
" }\n",
" var slashes = protocol.concat(\"//\");\n",
" var host = slashes.concat(window.location.host);\n",
"\n",
" window.wsUrl = \"\"\n",
" if (window.wsUrl === '') {\n",
" window.wsUrl = window.location.host\n",
" }\n",
"\n",
" window.getCookie = function getcookie(cname) {\n",
" var name = cname + \"=\";\n",
" var decodedCookie = decodeURIComponent(document.cookie);\n",
" var ca = decodedCookie.split(';');\n",
" for (var i = 0; i < ca.length; i++) {\n",
" var c = ca[i];\n",
" while (c.charAt(0) == ' ') {\n",
" c = c.substring(1);\n",
" }\n",
" if (c.indexOf(name) == 0) {\n",
" return c.substring(name.length, c.length);\n",
" }\n",
" }\n",
" return \"\";\n",
" }\n",
"\n",
" window.setCookie = function setcookie(cname, cvalue, exdays) {\n",
" var d = new Date();\n",
" d.setTime(d.getTime() + (exdays * 24 * 60 * 60 * 1000));\n",
" var expires = \"expires=\" + d.toUTCString();\n",
" document.cookie = cname + \"=\" + cvalue + \";\" + expires + \";path=/\";\n",
" }\n",
"\n",
" window.toTitle = function toTitleCase(str) {\n",
" return str.replace(/\\w\\S*/g, function (txt) {\n",
" return txt.charAt(0).toUpperCase() + txt.substr(1).toLowerCase();\n",
" });\n",
" }\n",
"\n",
" window.getUrlParams = function (key) {\n",
" let url = location.href\n",
" var params = {};\n",
" (url + '?').split('?')[1].split('&').forEach(function (pair) {\n",
" pair = (pair + '=').split('=').map(decodeURIComponent);\n",
" if (pair[0].length) {\n",
" params[pair[0]] = pair[1];\n",
" }\n",
" });\n",
" return params[key];\n",
" };\n",
"\n",
" window.stripHtml = function (html) {\n",
" let tmp = document.createElement(\"DIV\");\n",
" tmp.innerHTML = html;\n",
" return tmp.textContent || tmp.innerText || \"\";\n",
" }\n",
"\n",
" window.timeConverter = function tConverter(UNIX_timestamp) {\n",
" var a = new Date(UNIX_timestamp * 1000);\n",
" var months = ['Jan', 'Feb', 'Mar', 'Apr', 'May', 'Jun', 'Jul', 'Aug', 'Sep', 'Oct', 'Nov', 'Dec'];\n",
" var dates = ['Sun', 'Mon', 'Tue', 'Wed', 'Thu', 'Fri', 'Sat']\n",
" var month = months[a.getMonth()];\n",
" var date = a.getDate();\n",
" var time = dates[a.getDay()] + ' ' + month + ' ' + date\n",
" return time;\n",
" }\n",
"\n",
" window.roundToX = function (num, X) {\n",
" return +(Math.round(num + \"e+\" + X) + \"e-\" + X);\n",
" }\n",
" </script>\n",
"\n",
"\n",
" <meta name=\"description\"\n",
" content=\"Finnhub - Free stock API for realtime market data, global company fundamentals, economic data, and alternative data.\"/>\n",
"<meta name=\"keywords\"\n",
" content=\"stock api,realtime,stock,equity,forex,crypto,fundamental data,economic data,alternative data\">\n",
"<meta property=\"og:title\" content=\"Finnhub - Free realtime APIs for stock, forex and cryptocurrency.\">\n",
"<meta property=\"og:type\" content=\"website\">\n",
"<meta property=\"og:url\" content=\"https://finnhub.io/\">\n",
"<meta property=\"og:image\"\n",
" content=\"https://static.finnhub.io/img/finnhub_2020-05-09_20_51/logo/logo-transparent-thumbnail.png\">\n",
"<meta property=\"og:description\"\n",
" content=\"Finnhub - Free APIs for realtime stock, forex, and cryptocurrency. Company fundamentals, Economic data, and Alternative data.\">\n",
"<meta name=\"twitter:card\" content=\"summary\">\n",
"<meta name=\"twitter:title\"\n",
" content=\"Finnhub - Free realtime APIs for stock, forex and cryptocurrency.\">\n",
"<meta name=\"twitter:image\"\n",
" content=\"https://static.finnhub.io/img/finnhub_2020-05-09_20_51/logo/logo-transparent-thumbnail.png\">\n",
"<meta name=\"twitter:description\"\n",
" content=\"Finnhub - Free APIs for realtime stock, forex, and cryptocurrency. Company fundamentals, Economic data, and Alternative data.\">\n",
"<meta charset=\"UTF-8\">\n",
"\n",
" <title>Finnhub Stock APIs - Real-time stock prices, Company fundamentals, Estimates, and Alternative data.</title>\n",
"\n",
" <script type=\"text/javascript\">\n",
" window.serverData = {\"announcement\":\"Global Filings Search is now available on Finnhub. Check out \\u003ca href=\\\"/docs/api/global-filings-search\\\"\\u003ehere\\u003c/a\\u003e\"}\n",
" window.login = false \n",
" window.isSubscribe = null \n",
" </script>\n",
"</head>\n",
"<body>\n",
"<div id=\"root\"></div>\n",
"<script type=\"text/javascript\" src=\"/static/js/webpack/dist/main-index.bundle.357882d13b93982f42b7.js\"></script></body>\n",
"</html>\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"### Après le fichier\n",
"\n",
"Maintenant que l'on a récupéré ce fichier CSV, on construit les listes suivantes :\n",
"- 30 dernières minutes : open + high + low + volume = 150 (on commence de la plus récente à la plus ancienne)\n",
"- 30 dernières : pareil = 150\n",
"- 30 derniers jours : pareil = 150\n",
"- 7 jours de la semaine = 7 (de lundi à dimanche)\n",
"- 31 jours du mois = 31\n",
"- 12 mois de l'année = 12\n"
],
"metadata": {
"id": "eOP-2yDs6cBk"
}
},
{
"cell_type": "markdown",
"source": [
"# Liste des 30 dernières minutes"
],
"metadata": {
"id": "76Amy6xU7YuB"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée dans la question initiale.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" # Chargement du fichier CSV dans un DataFrame pandas.\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
"\n",
" # Assurer que la colonne timestamp est au bon format de date-heure.\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
"\n",
" # Formatage de la date-heure de la requête pour correspondre au format du DataFrame.\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
"\n",
" # Filtrer pour obtenir les données de la minute spécifiée.\n",
" info_minute = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(minutes=1))\n",
" ]\n",
"\n",
" # Vérifier si des données ont été trouvées et les retourner.\n",
" if not info_minute.empty:\n",
" # On utilise iloc[0] pour obtenir le premier enregistrement correspondant.\n",
" return info_minute.iloc[0].to_dict()\n",
" else:\n",
" # Aucune donnée correspondante n’a été trouvée.\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"# Fonction pour lister les données sur les 30 dernières minutes.\n",
"def liste_donnees_30min(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" # Convertir la date_heure_str en objet datetime.\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
" # Boucle pour récupérer les données minute par minute de la plus récente à la plus vieille.\n",
" for i in range(31):\n",
" # Le point de départ pour chaque minute.\n",
" minute_debut = date_heure_finale - pd.Timedelta(minutes=i)\n",
" # Convertir en chaîne de caractères ISO pour l’appel de fonction.\n",
" minute_debut_str = minute_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" # Utiliser la fonction Data pour obtenir les données.\n",
" donnee_minute = Data(chemin_fichier_csv, minute_debut_str)\n",
" # Si des données sont trouvées, les ajouter à la liste.\n",
" if donnee_minute:\n",
" liste_donnees.append(donnee_minute)\n",
"\n",
" # Renvoyer la liste complète des données des 30 dernières minutes.\n",
" return liste_donnees\n",
"\n",
"# Exemple d’utilisation.\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure au format correspondant à vos données.\n",
"\n",
"# Appel de la fonction.\n",
"donnees_30_dernieres_minutes = liste_donnees_30min(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage des résultats.\n",
"for donnee in donnees_30_dernieres_minutes:\n",
" print(donnee)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "nFmUPW6NN00u",
"outputId": "5b1ab0c1-502d-4dff-9d43-ccf20d822d23"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"{'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 1.08173, 'close': 1.08174, 'volume': 240.8699951171875}\n",
"{'timestamp': Timestamp('2024-02-01 12:44:00'), 'open': 1.08143, 'high': 1.08178, 'low': 1.08142, 'close': 1.08178, 'volume': 524.3599853515625}\n",
"{'timestamp': Timestamp('2024-02-01 12:43:00'), 'open': 1.08143, 'high': 1.0815, 'low': 1.08139, 'close': 1.08142, 'volume': 476.7699890136719}\n",
"{'timestamp': Timestamp('2024-02-01 12:42:00'), 'open': 1.08155, 'high': 1.08157, 'low': 1.08142, 'close': 1.08145, 'volume': 553.02001953125}\n",
"{'timestamp': Timestamp('2024-02-01 12:41:00'), 'open': 1.08146, 'high': 1.08152, 'low': 1.08141, 'close': 1.08151, 'volume': 171.00999450683594}\n",
"{'timestamp': Timestamp('2024-02-01 12:40:00'), 'open': 1.0813, 'high': 1.08147, 'low': 1.08125, 'close': 1.08147, 'volume': 460.5700073242188}\n",
"{'timestamp': Timestamp('2024-02-01 12:39:00'), 'open': 1.08118, 'high': 1.08131, 'low': 1.08116, 'close': 1.08131, 'volume': 339.1099853515625}\n",
"{'timestamp': Timestamp('2024-02-01 12:38:00'), 'open': 1.08107, 'high': 1.08118, 'low': 1.08107, 'close': 1.08117, 'volume': 337.29998779296875}\n",
"{'timestamp': Timestamp('2024-02-01 12:37:00'), 'open': 1.08111, 'high': 1.08126, 'low': 1.08106, 'close': 1.08106, 'volume': 223.5}\n",
"{'timestamp': Timestamp('2024-02-01 12:36:00'), 'open': 1.08101, 'high': 1.08117, 'low': 1.081, 'close': 1.08111, 'volume': 184.509994506836}\n",
"{'timestamp': Timestamp('2024-02-01 12:35:00'), 'open': 1.08092, 'high': 1.08104, 'low': 1.08078, 'close': 1.08104, 'volume': 401.8900146484375}\n",
"{'timestamp': Timestamp('2024-02-01 12:34:00'), 'open': 1.08102, 'high': 1.08106, 'low': 1.08093, 'close': 1.08093, 'volume': 187.9900054931641}\n",
"{'timestamp': Timestamp('2024-02-01 12:33:00'), 'open': 1.08118, 'high': 1.08119, 'low': 1.08096, 'close': 1.08102, 'volume': 492.9599914550781}\n",
"{'timestamp': Timestamp('2024-02-01 12:32:00'), 'open': 1.08124, 'high': 1.08126, 'low': 1.08097, 'close': 1.08117, 'volume': 396.239990234375}\n",
"{'timestamp': Timestamp('2024-02-01 12:31:00'), 'open': 1.08153, 'high': 1.08155, 'low': 1.08122, 'close': 1.08123, 'volume': 237.1199951171875}\n",
"{'timestamp': Timestamp('2024-02-01 12:30:00'), 'open': 1.08145, 'high': 1.08159, 'low': 1.08144, 'close': 1.08153, 'volume': 235.19000244140625}\n",
"{'timestamp': Timestamp('2024-02-01 12:29:00'), 'open': 1.08128, 'high': 1.08144, 'low': 1.08126, 'close': 1.08144, 'volume': 298.2900085449219}\n",
"{'timestamp': Timestamp('2024-02-01 12:28:00'), 'open': 1.08111, 'high': 1.0813, 'low': 1.08109, 'close': 1.08128, 'volume': 151.35000610351562}\n",
"{'timestamp': Timestamp('2024-02-01 12:27:00'), 'open': 1.08116, 'high': 1.08119, 'low': 1.08111, 'close': 1.08112, 'volume': 168.67999267578125}\n",
"{'timestamp': Timestamp('2024-02-01 12:26:00'), 'open': 1.08121, 'high': 1.08122, 'low': 1.08116, 'close': 1.08117, 'volume': 314.760009765625}\n",
"{'timestamp': Timestamp('2024-02-01 12:25:00'), 'open': 1.08108, 'high': 1.08131, 'low': 1.08108, 'close': 1.08122, 'volume': 277.32000732421875}\n",
"{'timestamp': Timestamp('2024-02-01 12:24:00'), 'open': 1.08122, 'high': 1.08132, 'low': 1.08107, 'close': 1.08109, 'volume': 328.8699951171875}\n",
"{'timestamp': Timestamp('2024-02-01 12:23:00'), 'open': 1.08118, 'high': 1.08129, 'low': 1.08117, 'close': 1.08121, 'volume': 426.1099853515625}\n",
"{'timestamp': Timestamp('2024-02-01 12:22:00'), 'open': 1.08124, 'high': 1.08128, 'low': 1.08116, 'close': 1.08117, 'volume': 388.7900085449219}\n",
"{'timestamp': Timestamp('2024-02-01 12:21:00'), 'open': 1.08123, 'high': 1.08134, 'low': 1.08121, 'close': 1.08123, 'volume': 447.4100036621094}\n",
"{'timestamp': Timestamp('2024-02-01 12:20:00'), 'open': 1.08167, 'high': 1.08167, 'low': 1.08122, 'close': 1.08122, 'volume': 224.88999938964844}\n",
"{'timestamp': Timestamp('2024-02-01 12:19:00'), 'open': 1.08143, 'high': 1.0817, 'low': 1.08143, 'close': 1.08168, 'volume': 90.05000305175781}\n",
"{'timestamp': Timestamp('2024-02-01 12:18:00'), 'open': 1.08117, 'high': 1.08143, 'low': 1.08113, 'close': 1.08143, 'volume': 273.9599914550781}\n",
"{'timestamp': Timestamp('2024-02-01 12:17:00'), 'open': 1.08108, 'high': 1.08119, 'low': 1.08108, 'close': 1.08117, 'volume': 269.0400085449219}\n",
"{'timestamp': Timestamp('2024-02-01 12:16:00'), 'open': 1.08123, 'high': 1.08127, 'low': 1.08106, 'close': 1.08108, 'volume': 401.4700012207031}\n",
"{'timestamp': Timestamp('2024-02-01 12:15:00'), 'open': 1.08122, 'high': 1.08138, 'low': 1.08118, 'close': 1.08124, 'volume': 482.1799926757813}\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 30 derniers jours"
],
"metadata": {
"id": "7YxteF4kN32H"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"# Fonction pour lister les données sur les 30 derniers jours.\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
"    # Parcours des 30 derniers jours : range(30, -1, -1) couvre 31 dates (J-30 à J).\n",
" for i in range(30, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_30_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 7 derniers jours :\")\n",
"print(donnees_30_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "VwDg9tVROFQK",
"outputId": "9202db03-19d2-4f5b-b84c-87f6e6fd3e35"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 7 derniers jours :\n",
"[{'timestamp': Timestamp('2024-01-02 12:45:00'), 'open': 1.09608, 'high': 1.09619, 'low': 1.09607, 'close': 1.09615, 'volume': 192.1300048828125}, {'timestamp': Timestamp('2024-01-03 12:45:00'), 'open': 1.09205, 'high': 1.09217, 'low': 1.09192, 'close': 1.09208, 'volume': 380.6900024414063}, {'timestamp': Timestamp('2024-01-04 12:45:00'), 'open': 1.0955, 'high': 1.0955, 'low': 1.0952, 'close': 1.09534, 'volume': 107.33999633789062}, {'timestamp': Timestamp('2024-01-05 12:45:00'), 'open': 1.09184, 'high': 1.09196, 'low': 1.09182, 'close': 1.09195, 'volume': 105.77999877929688}, {'timestamp': Timestamp('2024-01-07 22:04:00'), 'open': 1.09386, 'high': 1.0939, 'low': 1.09376, 'close': 1.0939, 'volume': 28.600000381469727}, {'timestamp': Timestamp('2024-01-08 12:45:00'), 'open': 1.0942, 'high': 1.09436, 'low': 1.0942, 'close': 1.0943, 'volume': 109.26000213623048}, {'timestamp': Timestamp('2024-01-09 12:45:00'), 'open': 1.09324, 'high': 1.09341, 'low': 1.09319, 'close': 1.09338, 'volume': 175.22999572753906}, {'timestamp': Timestamp('2024-01-10 12:45:00'), 'open': 1.09447, 'high': 1.09448, 'low': 1.09439, 'close': 1.09442, 'volume': 248.8500061035156}, {'timestamp': Timestamp('2024-01-11 12:45:00'), 'open': 1.09844, 'high': 1.09862, 'low': 1.09841, 'close': 1.0986, 'volume': 214.6000061035156}, {'timestamp': Timestamp('2024-01-12 12:45:00'), 'open': 1.09432, 'high': 1.09432, 'low': 1.09412, 'close': 1.09416, 'volume': 160.11000061035156}, {'timestamp': Timestamp('2024-01-14 22:00:00'), 'open': 1.0948, 'high': 1.09484, 'low': 1.09476, 'close': 1.09476, 'volume': 10.399999618530272}, {'timestamp': Timestamp('2024-01-15 12:45:00'), 'open': 1.09546, 'high': 1.0956, 'low': 1.09541, 'close': 1.09541, 'volume': 139.64999389648438}, {'timestamp': Timestamp('2024-01-16 12:45:00'), 'open': 1.0885, 'high': 1.08852, 'low': 1.08844, 'close': 1.08846, 'volume': 153.57000732421875}, {'timestamp': Timestamp('2024-01-17 12:45:00'), 'open': 1.0872, 'high': 1.0872, 'low': 1.0871, 
'close': 1.08715, 'volume': 216.75}, {'timestamp': Timestamp('2024-01-18 12:45:00'), 'open': 1.08786, 'high': 1.08798, 'low': 1.08786, 'close': 1.08798, 'volume': 135.25999450683594}, {'timestamp': Timestamp('2024-01-19 12:45:00'), 'open': 1.08825, 'high': 1.08825, 'low': 1.08815, 'close': 1.08816, 'volume': 176.00999450683594}, {'timestamp': Timestamp('2024-01-21 22:00:00'), 'open': 1.08906, 'high': 1.08918, 'low': 1.08905, 'close': 1.08909, 'volume': 5.5}, {'timestamp': Timestamp('2024-01-22 12:45:00'), 'open': 1.08977, 'high': 1.08981, 'low': 1.08958, 'close': 1.08962, 'volume': 271.3900146484375}, {'timestamp': Timestamp('2024-01-23 12:45:00'), 'open': 1.08625, 'high': 1.08627, 'low': 1.08616, 'close': 1.08621, 'volume': 221.3300018310547}, {'timestamp': Timestamp('2024-01-24 12:45:00'), 'open': 1.08952, 'high': 1.08956, 'low': 1.0895, 'close': 1.08954, 'volume': 150.52999877929688}, {'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 7 derniers jours"
],
"metadata": {
"id": "HeT4Hg1pP0H1"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"# Fonction pour lister les données sur les 7 derniers jours.\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
"    # Parcours des 7 derniers jours : range(7, -1, -1) couvre 8 dates (J-7 à J).\n",
" for i in range(7, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_7_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 7 derniers jours :\")\n",
"print(donnees_7_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "C9t40s2GP4PW",
"outputId": "0aa142c2-ec45-439e-a342-e4be3c7da950"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 7 derniers jours :\n",
"[{'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 31 derniers jours"
],
"metadata": {
"id": "Vi2YecL_SBAW"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
"\n",
" for i in range(31, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_31_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 31 derniers jours :\")\n",
"print(donnees_31_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "4mA8CQjNSFQA",
"outputId": "0d728b04-ab3a-4a02-d7c4-d7364527ffdc"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 31 derniers jours :\n",
"[{'timestamp': Timestamp('2024-01-01 22:00:00'), 'open': 1.10427, 'high': 1.10429, 'low': 1.10425, 'close': 1.10429, 'volume': 5.900000095367432}, {'timestamp': Timestamp('2024-01-02 12:45:00'), 'open': 1.09608, 'high': 1.09619, 'low': 1.09607, 'close': 1.09615, 'volume': 192.1300048828125}, {'timestamp': Timestamp('2024-01-03 12:45:00'), 'open': 1.09205, 'high': 1.09217, 'low': 1.09192, 'close': 1.09208, 'volume': 380.6900024414063}, {'timestamp': Timestamp('2024-01-04 12:45:00'), 'open': 1.0955, 'high': 1.0955, 'low': 1.0952, 'close': 1.09534, 'volume': 107.33999633789062}, {'timestamp': Timestamp('2024-01-05 12:45:00'), 'open': 1.09184, 'high': 1.09196, 'low': 1.09182, 'close': 1.09195, 'volume': 105.77999877929688}, {'timestamp': Timestamp('2024-01-07 22:04:00'), 'open': 1.09386, 'high': 1.0939, 'low': 1.09376, 'close': 1.0939, 'volume': 28.600000381469727}, {'timestamp': Timestamp('2024-01-08 12:45:00'), 'open': 1.0942, 'high': 1.09436, 'low': 1.0942, 'close': 1.0943, 'volume': 109.26000213623048}, {'timestamp': Timestamp('2024-01-09 12:45:00'), 'open': 1.09324, 'high': 1.09341, 'low': 1.09319, 'close': 1.09338, 'volume': 175.22999572753906}, {'timestamp': Timestamp('2024-01-10 12:45:00'), 'open': 1.09447, 'high': 1.09448, 'low': 1.09439, 'close': 1.09442, 'volume': 248.8500061035156}, {'timestamp': Timestamp('2024-01-11 12:45:00'), 'open': 1.09844, 'high': 1.09862, 'low': 1.09841, 'close': 1.0986, 'volume': 214.6000061035156}, {'timestamp': Timestamp('2024-01-12 12:45:00'), 'open': 1.09432, 'high': 1.09432, 'low': 1.09412, 'close': 1.09416, 'volume': 160.11000061035156}, {'timestamp': Timestamp('2024-01-14 22:00:00'), 'open': 1.0948, 'high': 1.09484, 'low': 1.09476, 'close': 1.09476, 'volume': 10.399999618530272}, {'timestamp': Timestamp('2024-01-15 12:45:00'), 'open': 1.09546, 'high': 1.0956, 'low': 1.09541, 'close': 1.09541, 'volume': 139.64999389648438}, {'timestamp': Timestamp('2024-01-16 12:45:00'), 'open': 1.0885, 'high': 1.08852, 'low': 1.08844, 
'close': 1.08846, 'volume': 153.57000732421875}, {'timestamp': Timestamp('2024-01-17 12:45:00'), 'open': 1.0872, 'high': 1.0872, 'low': 1.0871, 'close': 1.08715, 'volume': 216.75}, {'timestamp': Timestamp('2024-01-18 12:45:00'), 'open': 1.08786, 'high': 1.08798, 'low': 1.08786, 'close': 1.08798, 'volume': 135.25999450683594}, {'timestamp': Timestamp('2024-01-19 12:45:00'), 'open': 1.08825, 'high': 1.08825, 'low': 1.08815, 'close': 1.08816, 'volume': 176.00999450683594}, {'timestamp': Timestamp('2024-01-21 22:00:00'), 'open': 1.08906, 'high': 1.08918, 'low': 1.08905, 'close': 1.08909, 'volume': 5.5}, {'timestamp': Timestamp('2024-01-22 12:45:00'), 'open': 1.08977, 'high': 1.08981, 'low': 1.08958, 'close': 1.08962, 'volume': 271.3900146484375}, {'timestamp': Timestamp('2024-01-23 12:45:00'), 'open': 1.08625, 'high': 1.08627, 'low': 1.08616, 'close': 1.08621, 'volume': 221.3300018310547}, {'timestamp': Timestamp('2024-01-24 12:45:00'), 'open': 1.08952, 'high': 1.08956, 'low': 1.0895, 'close': 1.08954, 'volume': 150.52999877929688}, {'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 
1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# 12 derniers mois"
],
"metadata": {
"id": "APxPvhVTT2OZ"
}
},
{
"cell_type": "code",
"source": [
"import pandas as pd\n",
"\n",
"# Fonction Data donnée.\n",
"def Data(chemin_fichier_csv, date_heure_str):\n",
" try:\n",
" donnees = pd.read_csv(chemin_fichier_csv)\n",
" donnees['timestamp'] = pd.to_datetime(donnees['timestamp'])\n",
" date_heure_requise = pd.to_datetime(date_heure_str)\n",
" info_jour = donnees[\n",
" (donnees['timestamp'] >= date_heure_requise) &\n",
" (donnees['timestamp'] < date_heure_requise + pd.Timedelta(days=1))\n",
" ]\n",
" if not info_jour.empty:\n",
" return info_jour.iloc[0].to_dict()\n",
" else:\n",
" return None\n",
" except Exception as e:\n",
" print(f\"Erreur lors de la lecture du fichier CSV ou de la recherche des données: {e}\")\n",
" return None\n",
"\n",
"\n",
"def liste_donnees_7jours(chemin_fichier_csv, date_heure_str):\n",
" liste_donnees = []\n",
" date_heure_finale = pd.to_datetime(date_heure_str)\n",
"\n",
"\n",
" for i in range(365, -1, -1):\n",
" jour_debut = date_heure_finale - pd.Timedelta(days=i)\n",
" jour_debut_str = jour_debut.strftime('%Y-%m-%d %H:%M:%S')\n",
" donnee_jour = Data(chemin_fichier_csv, jour_debut_str)\n",
" if donnee_jour:\n",
" liste_donnees.append(donnee_jour)\n",
"\n",
" return liste_donnees\n",
"\n",
"\n",
"date_heure_str = \"2024-02-01 12:45:00\" # Ajustez cette date-heure.\n",
"\n",
"donnees_365_derniers_jours_liste = liste_donnees_7jours(chemin_fichier_csv, date_heure_str)\n",
"\n",
"# Affichage de la liste finale contenant toutes les données.\n",
"print(\"Liste des données des 12 derniers mois :\")\n",
"print(donnees_365_derniers_jours_liste)\n"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/"
},
"id": "hQ1WARdpT6c8",
"outputId": "778ba183-29d3-405d-b6a8-6c01620da107"
},
"execution_count": null,
"outputs": [
{
"output_type": "stream",
"name": "stdout",
"text": [
"Liste des données des 12 derniers mois :\n",
"[{'timestamp': Timestamp('2024-01-01 22:00:00'), 'open': 1.10427, 'high': 1.10429, 'low': 1.10425, 'close': 1.10429, 'volume': 5.900000095367432}, {'timestamp': Timestamp('2024-01-02 12:45:00'), 'open': 1.09608, 'high': 1.09619, 'low': 1.09607, 'close': 1.09615, 'volume': 192.1300048828125}, {'timestamp': Timestamp('2024-01-03 12:45:00'), 'open': 1.09205, 'high': 1.09217, 'low': 1.09192, 'close': 1.09208, 'volume': 380.6900024414063}, {'timestamp': Timestamp('2024-01-04 12:45:00'), 'open': 1.0955, 'high': 1.0955, 'low': 1.0952, 'close': 1.09534, 'volume': 107.33999633789062}, {'timestamp': Timestamp('2024-01-05 12:45:00'), 'open': 1.09184, 'high': 1.09196, 'low': 1.09182, 'close': 1.09195, 'volume': 105.77999877929688}, {'timestamp': Timestamp('2024-01-07 22:04:00'), 'open': 1.09386, 'high': 1.0939, 'low': 1.09376, 'close': 1.0939, 'volume': 28.600000381469727}, {'timestamp': Timestamp('2024-01-08 12:45:00'), 'open': 1.0942, 'high': 1.09436, 'low': 1.0942, 'close': 1.0943, 'volume': 109.26000213623048}, {'timestamp': Timestamp('2024-01-09 12:45:00'), 'open': 1.09324, 'high': 1.09341, 'low': 1.09319, 'close': 1.09338, 'volume': 175.22999572753906}, {'timestamp': Timestamp('2024-01-10 12:45:00'), 'open': 1.09447, 'high': 1.09448, 'low': 1.09439, 'close': 1.09442, 'volume': 248.8500061035156}, {'timestamp': Timestamp('2024-01-11 12:45:00'), 'open': 1.09844, 'high': 1.09862, 'low': 1.09841, 'close': 1.0986, 'volume': 214.6000061035156}, {'timestamp': Timestamp('2024-01-12 12:45:00'), 'open': 1.09432, 'high': 1.09432, 'low': 1.09412, 'close': 1.09416, 'volume': 160.11000061035156}, {'timestamp': Timestamp('2024-01-14 22:00:00'), 'open': 1.0948, 'high': 1.09484, 'low': 1.09476, 'close': 1.09476, 'volume': 10.399999618530272}, {'timestamp': Timestamp('2024-01-15 12:45:00'), 'open': 1.09546, 'high': 1.0956, 'low': 1.09541, 'close': 1.09541, 'volume': 139.64999389648438}, {'timestamp': Timestamp('2024-01-16 12:45:00'), 'open': 1.0885, 'high': 1.08852, 'low': 1.08844, 
'close': 1.08846, 'volume': 153.57000732421875}, {'timestamp': Timestamp('2024-01-17 12:45:00'), 'open': 1.0872, 'high': 1.0872, 'low': 1.0871, 'close': 1.08715, 'volume': 216.75}, {'timestamp': Timestamp('2024-01-18 12:45:00'), 'open': 1.08786, 'high': 1.08798, 'low': 1.08786, 'close': 1.08798, 'volume': 135.25999450683594}, {'timestamp': Timestamp('2024-01-19 12:45:00'), 'open': 1.08825, 'high': 1.08825, 'low': 1.08815, 'close': 1.08816, 'volume': 176.00999450683594}, {'timestamp': Timestamp('2024-01-21 22:00:00'), 'open': 1.08906, 'high': 1.08918, 'low': 1.08905, 'close': 1.08909, 'volume': 5.5}, {'timestamp': Timestamp('2024-01-22 12:45:00'), 'open': 1.08977, 'high': 1.08981, 'low': 1.08958, 'close': 1.08962, 'volume': 271.3900146484375}, {'timestamp': Timestamp('2024-01-23 12:45:00'), 'open': 1.08625, 'high': 1.08627, 'low': 1.08616, 'close': 1.08621, 'volume': 221.3300018310547}, {'timestamp': Timestamp('2024-01-24 12:45:00'), 'open': 1.08952, 'high': 1.08956, 'low': 1.0895, 'close': 1.08954, 'volume': 150.52999877929688}, {'timestamp': Timestamp('2024-01-25 12:45:00'), 'open': 1.08919, 'high': 1.08925, 'low': 1.08919, 'close': 1.08921, 'volume': 94.7699966430664}, {'timestamp': Timestamp('2024-01-26 12:45:00'), 'open': 1.08699, 'high': 1.08717, 'low': 1.08693, 'close': 1.08716, 'volume': 134.00999450683594}, {'timestamp': Timestamp('2024-01-28 22:00:00'), 'open': 1.08468, 'high': 1.08484, 'low': 1.08428, 'close': 1.0843, 'volume': 28.0}, {'timestamp': Timestamp('2024-01-29 12:45:00'), 'open': 1.08352, 'high': 1.08364, 'low': 1.08352, 'close': 1.08358, 'volume': 98.5199966430664}, {'timestamp': Timestamp('2024-01-30 12:45:00'), 'open': 1.08428, 'high': 1.08439, 'low': 1.08425, 'close': 1.08439, 'volume': 47.70000076293945}, {'timestamp': Timestamp('2024-01-31 12:45:00'), 'open': 1.08375, 'high': 1.08379, 'low': 1.08366, 'close': 1.08376, 'volume': 199.2100067138672}, {'timestamp': Timestamp('2024-02-01 12:45:00'), 'open': 1.08178, 'high': 1.08184, 'low': 
1.08173, 'close': 1.08174, 'volume': 240.8699951171875}]\n"
]
}
]
},
{
"cell_type": "markdown",
"source": [
"# ducoup voici ce que nous récupéront les quoicoupote"
],
"metadata": {
"id": "m2vVLZQEUW8P"
}
},
{
"cell_type": "code",
"source": [
"donnees_365_derniers_jours_liste\n",
"donnees_31_derniers_jours_liste\n",
"donnees_7_derniers_jours_liste\n",
"donnees_30_derniers_jours_liste\n",
"donnees_30_dernieres_minutes"
],
"metadata": {
"colab": {
"base_uri": "https://localhost:8080/",
"height": 193
},
"id": "G3RqMOdVUbdG",
"outputId": "d566ad25-4667-42bd-f882-2b5fbbe4ab5a"
},
"execution_count": null,
"outputs": [
{
"output_type": "error",
"ename": "NameError",
"evalue": "name 'donnees_30_derniers_jours' is not defined",
"traceback": [
"\u001b[0;31m---------------------------------------------------------------------------\u001b[0m",
"\u001b[0;31mNameError\u001b[0m Traceback (most recent call last)",
"\u001b[0;32m<ipython-input-14-7bb42c6ad7cd>\u001b[0m in \u001b[0;36m<cell line: 4>\u001b[0;34m()\u001b[0m\n\u001b[1;32m 2\u001b[0m \u001b[0mdonnees_31_derniers_jours_liste\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[1;32m 3\u001b[0m \u001b[0mdonnees_7_derniers_jours_liste\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0;32m----> 4\u001b[0;31m \u001b[0mdonnees_30_derniers_jours\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n\u001b[0m\u001b[1;32m 5\u001b[0m \u001b[0mdonnees_30_dernieres_minutes\u001b[0m\u001b[0;34m\u001b[0m\u001b[0;34m\u001b[0m\u001b[0m\n",
"\u001b[0;31mNameError\u001b[0m: name 'donnees_30_derniers_jours' is not defined"
]
}
]
}
]
}
|
ef61ec603740460f6bfe74fc8fb3a7ef
|
{
"intermediate": 0.3861789405345917,
"beginner": 0.27531397342681885,
"expert": 0.3385070562362671
}
|
40,966
|
create me a simple react app. I want to deploy it on vercel. it should be stateless and display user information from fetching the fastify api. the app should have a button which should allow the user to signin using their google account. I want to use firebase auth for that. Show me complete code as well as complete setup instructions. Thanks
|
c0a021ae094ae14493dbf5f9ed70ba13
|
{
"intermediate": 0.5830607414245605,
"beginner": 0.21189472079277039,
"expert": 0.20504452288150787
}
|
40,967
|
In a macro enabled word document, is there a VBA code that can auto fit the cell row to the cell content
|
876fdb2d5ed3f90a04922d7ebdfd2eab
|
{
"intermediate": 0.3050267994403839,
"beginner": 0.4698343873023987,
"expert": 0.225138857960701
}
|
40,968
|
I need you to check and verify the below reward calculation function "calculate_reward(self, saturation_value, performance_metrics)" based on the below requirement, whether the given code satisfies the requirement or not in its computation steps. I implemeted this function with in my reinforcement learning approach.
I have total '8' parameters as a target specifications (1 saturation_value + 7 performance_metrics = 8 total target specifications), they are 'saturation_value', 'Area', 'PowerDissipation', 'SlewRate', 'Gain', 'Bandwidth3dB', 'UnityGainFreq', 'PhaseMargin'. I am optimumly tuning the design dimensions of the transistor in twostage opamp circuit to met the objective target specifications.
RL Reward Function condition, The reward is directly related to the design goal ('8' objective target specifications). We define the reward 𝑟𝑖 at each time step 𝑖 as 𝑟𝑖 = 𝑟, if 𝑟 < 0 or 𝑟𝑖 = 𝑅, if 𝑟 = 0, where 𝑟 = summation of min{(𝑔𝑗𝑖 − 𝑔𝑗∗ )/(𝑔𝑗𝑖 + 𝑔𝑗∗), 0}. where, it is a normalized difference between the obtained performance metric values 𝑔𝑖 and the given predetermined performance measures (design specifications) 𝑔∗.
Alter this given condition r, according to my below requirements:
Here my most preference parameter is the 'saturation_value', the desired value must be need to be '1'.
Then among 7 performance_metrics, the desired parameter values of 'Area', 'PowerDissipation' are need to be 'minimize' and
the desired parameter values of 'SlewRate', 'Gain', 'Bandwidth3dB', 'UnityGainFreq', 'PhaseMargin' are need to be 'maximize'.
condition for rewards r, to be alter according to the required minimum and maximim conditions.
The upper bound of 𝑟 is set to be 0 to avoid overoptimizing the parameters once the given design specifications are reached. All 𝑁 design specifications are equally important. We also give a large reward (i.e., 𝑅 = 10) to encourage the agent if the design goals are reached at some step. The episode return 𝑅𝑠0,𝑔∗ of searching optimal design device parameters for the given goals 𝑔∗ starting from an initial state 𝑠0, is the accumulated reward of all steps: 𝑅𝑠0,𝑔∗ = summation of 𝑟𝑖 . Our goal is to train a good policy to maximize 𝑅𝑠0,𝑔∗ .
Objective Target specifications Requirement:
saturation_value = 1 (must be attain)
'Area' lesser than '3e-10' (desired value should be lesser than target)
'PowerDissipation' lesser than '2500' (desired value should be lesser than target)
'SlewRate' greater than '20' (desired value should be greater than target)
'Gain' greater than '70' (desired value should be greater than target)
'Bandwidth3dB' greater than '30e3' (desired value should be greater than target)
'UnityGainFreq' greater than or equal to '30e6' (desired value should be greater than target)
'PhaseMargin' between '60-90' (desired value should be greater than target)
Code:
def calculate_reward(self, saturation_value, performance_metrics):
Area = performance_metrics['Area']
PowerDissipation = performance_metrics['PowerDissipation']
SlewRate = performance_metrics['SlewRate']
Gain = performance_metrics['Gain']
Bandwidth3dB = performance_metrics['Bandwidth3dB']
UnityGainFreq = performance_metrics['UnityGainFreq']
PhaseMargin = performance_metrics['PhaseMargin']
# Check if saturation condition is met
if saturation_value == 1:
reward_saturation = 10 # Large reward if saturation condition is met
else:
reward_saturation = 0
# Calculate reward based on other performance metrics and design specifications
reward = 0
reward += min((Area - 3e-10) / (Area + 3e-10), 0)
reward += min((PowerDissipation - 2500) / (PowerDissipation + 2500), 0)
reward += min((SlewRate - 20) / (SlewRate + 20), 0)
reward += min((Gain - 70) / (Gain + 70), 0)
reward += min((Bandwidth3dB - 30000) / (Bandwidth3dB + 30000), 0)
reward += min((UnityGainFreq - 30000000) / (UnityGainFreq + 30000000), 0)
#reward += min((phase_margin - 60) / (phase_margin + 60), 0)
reward += min((PhaseMargin - 60) / (PhaseMargin + 30), 0)
# Clip the reward to avoid overoptimizing
reward = max(reward, 0)
reward = min(reward, 10)
# Add saturation reward
reward += reward_saturation
return reward
|
98a66543536a149abc225a920042a799
|
{
"intermediate": 0.29169270396232605,
"beginner": 0.41272619366645813,
"expert": 0.29558107256889343
}
|
40,969
|
hi
|
a56d4d0cea105c5c9d0c30505ce5b311
|
{
"intermediate": 0.3246487081050873,
"beginner": 0.27135494351387024,
"expert": 0.40399640798568726
}
|
40,970
|
write tests for rust code
|
b868e2d8fb6e048b5e0a1ee60c994ae6
|
{
"intermediate": 0.2978852391242981,
"beginner": 0.3528819680213928,
"expert": 0.34923282265663147
}
|
40,971
|
Почему YARA правило не компилируется import "hash"
rule footprint_hash
{
strings:
$sha_256_x64 = "61C0810A23580CF492A6BA4F7654566108331E7A4134C968C2D6A05261B2D8A1"
condition:
hash.sha256(0,filesize) == $sha_256_x64
}
|
b5d943737636f1f79f866b2cdd393a27
|
{
"intermediate": 0.43057262897491455,
"beginner": 0.3010638654232025,
"expert": 0.2683635354042053
}
|
40,972
|
Почему данное яро правило не работает import "hash"
rule hash_checkers
{
condition:
hash.sha256(0,filesize) == "61C0810A23580CF492A6BA4F7654566108331E7A4134C968C2D6A05261B2D8A1" or
hash.sha256(0,filesize) =="94795FD89366E01BD6CE6471FF27C3782E2E16377A848426CF0B2E6BAEE9449B"
}
|
a82a1586d9f6f428c2d77061f59ce5d8
|
{
"intermediate": 0.4036903381347656,
"beginner": 0.2905857563018799,
"expert": 0.3057239055633545
}
|
40,973
|
hi
|
f2c5d3821dcf0b6a7be6f9577490a830
|
{
"intermediate": 0.3246487081050873,
"beginner": 0.27135494351387024,
"expert": 0.40399640798568726
}
|
40,974
|
帮我把奖励改为未来20日(最大有利波动-最大不利波动)/(最大不利波动),同时注意把20作为类的默认参数,同时注意没有足够的未来K线时,结束掉本轮:
K线 数组:['Open', 'High', 'Low', 'Close'],注意买卖操作选择未来的low还是high进行最大有利波动和最大不利波动的计算
代码:
class StockTradingEnv(py_environment.PyEnvironment):
num = 1
def __init__(self, stock_data, initial_cash=1000000, history_length=50,
filename=datetime.datetime.now().strftime('%Y%m%d%H%M%S%f'), plt_dir=r'/data/history/', model_name=''):
self._stock_data = np.array(stock_data)
self._history_length = history_length
self._current_step = history_length
self._initial_cash = initial_cash
self._cash_in_hand = initial_cash
self._num_stocks_owned = 0
self._start_step = history_length
self._total_asset_record = [initial_cash] # 记录每一步后的总资产以计算奖励
self._filename = str(StockTradingEnv.num) + "_" + filename
StockTradingEnv.num = StockTradingEnv.num + 1
self._episode_counter = 0 # 跟踪轮数的计数器
self._trades = [] # 贸易详情
self._plt_dir = plt_dir + model_name + '/'
os.makedirs(self._plt_dir, exist_ok=True)
self._observation_spec = array_spec.ArraySpec(
shape=(history_length, 4), dtype=np.float32, name='observation')
# 动作空间定义为0-不操作,1-做多,2-做空(买卖同一量的股票)
self._action_spec = array_spec.BoundedArraySpec(
shape=(), dtype=np.int32, minimum=0, maximum=2, name='action')
self._episode_ended = False
def action_spec(self):
return self._action_spec
def observation_spec(self):
return self._observation_spec
def _step(self, action):
if self._episode_ended:
return self.reset()
current_stock_price = self._stock_data[self._current_step][3]
if action == 1: # 做多/买入
if self._cash_in_hand >= current_stock_price:
self._num_stocks_owned += 1
self._cash_in_hand -= current_stock_price
self._trades.append((self._current_step, 'buy'))
elif action == 2: # 做空/卖出
if self._num_stocks_owned > 0:
self._num_stocks_owned -= 1
self._cash_in_hand += current_stock_price
self._trades.append((self._current_step, 'sell'))
# 更新状态和步数
self._current_step += 1
if self._current_step >= len(self._stock_data):
self._episode_ended = True
# 记录图表数据
new_total_asset = self._cash_in_hand + self._num_stocks_owned * current_stock_price
reward = new_total_asset - self._initial_cash # 资产增加的部分作为奖励
self._total_asset_record.append(new_total_asset)
if self._episode_ended:
self._episode_counter += 1 # 跟踪轮数的计数器增加
self._draw_charts() # 绘制并保存K线图和资金变化曲线
return ts.termination(np.array(self._state, dtype=np.float32), new_total_asset - self._initial_cash)
else:
self._state = self._get_observation()
return ts.transition(np.array(self._state, dtype=np.float32), reward=reward, discount=1.0)
|
a9d41d06676e377f4cacbd2aa5e7751a
|
{
"intermediate": 0.3395599126815796,
"beginner": 0.47565680742263794,
"expert": 0.1847831755876541
}
|
40,975
|
fow to fix: npm run build
> modern-portfolio@1.0.1 build
> next build
Linting and checking validity of types ... ⨯ ESLint: Failed to load config "next/babel" to extend from. Referenced from: C:\xampp\htdocs\NEWS\.eslintrc.json
✓ Linting and checking validity of types
Creating an optimized production build ..<w> [webpack.cache.PackFileCacheStrategy] Skipped not serializable cache item 'Compilation/modules|C:\xampp\htdocs\NEWS\node_modules\next\dist\build\webpack\loaders\css-loader\src\index.js??ruleSet[1].rules[7].oneOf[13].use[1]!C:\xampp\htdocs\NEWS\node_modules\next\dist\build\webpack\loaders\postcss-loader\src\index.js??ruleSet[1].rules[7].oneOf[13].use[2]!C:\xampp\htdocs\NEWS\styles\globals.css': No serializer registered for Warning
<w> while serializing webpack/lib/cache/PackFileCacheStrategy.PackContentItems -> webpack/lib/NormalModule -> Array { 1 items } -> webpack/lib/ModuleWarning -> Warning
⚠ Compiled with warnings
./styles/globals.css.webpack[javascript/auto]!=!./node_modules/next/dist/build/webpack/loaders/css-loader/src/index.js??ruleSet[1].rules[7].oneOf[13].use[1]!./node_modules/next/dist/build/webpack/loaders/postcss-loader/src/index.js??ruleSet[1].rules[7].oneOf[13].use[2]!./styles/globals.css
Warning
(182:5) Nested CSS was detected, but CSS nesting has not been configured correctly.
Please enable a CSS nesting plugin *before* Tailwind in your configuration.
See how here: https://tailwindcss.com/docs/using-with-preprocessors#nesting
Import trace for requested module:
./styles/globals.css.webpack[javascript/auto]!=!./node_modules/next/dist/build/webpack/loaders/css-loader/src/index.js??ruleSet[1].rules[7].oneOf[13].use[1]!./node_modules/next/dist/build/webpack/loaders/postcss-loader/src/index.js??ruleSet[1].rules[7].oneOf[13].use[2]!./styles/globals.css
./styles/globals.css
unhandledRejection TypeError: Cannot read properties of null (reading 'useEffect')
at exports.useEffect (C:\xampp\htdocs\NEWS\node_modules\react\cjs\react.production.min.js:24:292)
at C:\xampp\htdocs\NEWS\.next\server\chunks\506.js:1:7925 {
type: 'TypeError'
}
|
2a0cb0fb51c035f77474984a73085b92
|
{
"intermediate": 0.5674929618835449,
"beginner": 0.2870066463947296,
"expert": 0.14550037682056427
}
|
40,976
|
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 2440, in predict_function *
return step_function(self, iterator)
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 2425, in step_function **
outputs = model.distribute_strategy.run(run_step, args=(data,))
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 2413, in run_step **
outputs = model.predict_step(data)
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/training.py", line 2381, in predict_step
return self(x, training=False)
File "/usr/local/lib/python3.10/dist-packages/keras/src/utils/traceback_utils.py", line 70, in error_handler
raise e.with_traceback(filtered_tb) from None
File "/usr/local/lib/python3.10/dist-packages/keras/src/engine/input_spec.py", line 253, in assert_input_compatibility
raise ValueError(
ValueError: Exception encountered when calling layer 'sequential_5' (type Sequential).
Input 0 of layer "dense_8" is incompatible with the layer: expected min_ndim=2, found ndim=1. Full shape received: (None,)
Call arguments received by layer 'sequential_5' (type Sequential):
• inputs=tf.Tensor(shape=(None,), dtype=int64)
• training=False
• mask=None
|
474a88a226f53887a1dc88273bf6d570
|
{
"intermediate": 0.3699388802051544,
"beginner": 0.3376385569572449,
"expert": 0.2924225330352783
}
|
40,977
|
import requests
import pandas as pd
from time import sleep
# مجموعة لتخزين الأحداث المطبوعة بالفعل لتجنب التكرار
printed_events = set()
def fetch_live_players(printed_events):
fixtures_url = 'https://fantasy.premierleague.com/api/fixtures/'
fixtures_response = requests.get(fixtures_url)
players_url = 'https://fantasy.premierleague.com/api/bootstrap-static/'
players_response = requests.get(players_url)
if fixtures_response.status_code == 200 and players_response.status_code == 200:
fixtures_data = fixtures_response.json()
players_data = players_response.json()
live_fixtures = [fixture for fixture in fixtures_data if not fixture['finished'] and fixture['started']]
players_df = pd.DataFrame(players_data['elements'])
teams_df = pd.DataFrame(players_data['teams'])
if live_fixtures:
for fixture in live_fixtures:
event_id = fixture['event']
live_url = f'https://fantasy.premierleague.com/api/event/{event_id}/live/'
live_response = requests.get(live_url)
if live_response.status_code == 200:
live_data = live_response.json()['elements']
for element in live_data:
element_info = players_df.loc[players_df['id'] == element['id']].iloc[0]
team_info = teams_df.loc[teams_df['id'] == element_info['team']].iloc[0]
player_name = element_info['web_name']
stats = element['stats']
# تحقق من الأهداف
if stats['goals_scored'] > 0:
event_key = f"{player_name}-goal-{stats['goals_scored']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Goal⚽: {player_name}, {team_info['name']}- P: {stats['goals_scored']*6}, Tot: {stats['total_points']}")
# تحقق من الأسيست
if stats['assists'] > 0:
event_key = f"{player_name}-assist-{stats['assists']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Assist🅰️: {player_name}, {team_info['name']}- P: {stats['assists']*3}, Tot: {stats['total_points']}")
# تحقق من البطاقات الصفراء
if stats['yellow_cards'] > 0:
event_key = f"{player_name}-yellow-{stats['yellow_cards']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Yellow Card🟨: #{player_name}, #{team_info['name']}- P: -{stats['yellow_cards']*1}, Tot: {stats['total_points']}")
# تحقق من البطاقات الحمراء
if stats['red_cards'] > 0:
event_key = f"{player_name}-red-{stats['red_cards']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Red Card: {player_name}, {team_info['name']}, Event Points: -{stats['red_cards']*3}, Total Points: {stats['total_points']}")
else:
print('Failed to retrieve data.')
# نقطة بدء البرنامج
def main():
while True:
try:
fetch_live_players(printed_events)
except Exception as e:
print(e) # لدينا الآن الخطأ المطبوع في حال حدوث أي استثناء
sleep(60) # فترة الانتظار بين كل تحقق هي 60 ثانية
if __name__ == "__main__":
main(). اريد تعديل على السكريبت حيث يكون عرض الاحداث حسب الوقت الفعلي الحصري يعني يكون ترتيب في العرض مع تشغيل السكريبت .ايضا أضف إلى السكريبت عرض lineup الحصري للفريق في المبارة المعينة في الجولة الحالية مع توفره
|
854f73f6fef7d670bbcfb7e667eb9bf1
|
{
"intermediate": 0.40353623032569885,
"beginner": 0.3928612172603607,
"expert": 0.20360258221626282
}
|
40,978
|
Mo(y,x | C,S) 这个公式在 Malmquist Productivity Indexes 是什么意思
|
c91c5663e6ede295f6a6867237c1051c
|
{
"intermediate": 0.2712824046611786,
"beginner": 0.5694307088851929,
"expert": 0.15928684175014496
}
|
40,979
|
%В пролог-программе заданы факты вида parent (a,b): a - родитель для b, и факты вида man(c): с - мужчина. Используя эти предикаты, определить:
% a) отношение grand_nephew(X,Y): X является внучатым племянником для Y (т.е. X - внук сестры или брата Y).
% б) отношение has cousin Kim(X): у человека X есть двоюродный брат или сестра по имени Kim.
implement main
open core
domains
gender = female(); male().
class facts - familyDB
person : (string Name, gender Gender).
parent : (string Person, string Parent).
class predicates
father : (string Person, string Father) nondeterm anyflow.
clauses
father(Person, Father) :-
parent(Person, Father),
person(Father, male()).
class predicates
grandFather : (string Person, string GrandFather) nondeterm (o,o).
clauses
grandFather(Person, GrandFather) :-
parent(Person, Parent),
father(Parent, GrandFather).
class predicates
ancestor : (string Person, string Ancestor) nondeterm (i,o).
clauses
ancestor(Person, Ancestor) :-
parent(Person, Ancestor).
ancestor(Person, Ancestor) :-
parent(Person, P1),
ancestor(P1, Ancestor).
class predicates
reconsult : (string FileName).
clauses
reconsult(FileName) :-
retractFactDB(familyDB),
file::consult(FileName, familyDB).
clauses
person("Judith",female()).
person("Bill",male()).
person("John",male()).
person("Pam",female()).
parent("John","Judith").
parent("Bill","John").
parent("Pam","Bill").
clauses
run():-
console::init(),
stdIO:: write("\nfather test\n"),
father(X, Y),
stdIO::writef("% is the father of %\n",
Y, X),
fail.
run():-
stdIO:: write("\ngrandfather test\n"),
grandFather(X, Y),
stdIO:: writef("% is the grandfather of %\n", Y, X),
fail.
run():-
stdIO:: write("\n ancestor of Pam test\n"),
X = "Pam",
ancestor(X, Y),
stdIO:: writef("% is the ancestor of %\n", Y, X),
fail.
run():-
stdIO:: write("End of test\n"),
programControl:: sleep(9000),
succeed().
end implement main
goal
mainExe::run(main::run).
|
9490ccec60c34a710ed49fbc62644867
|
{
"intermediate": 0.30719733238220215,
"beginner": 0.4533156752586365,
"expert": 0.239486962556839
}
|
40,980
|
В пролог-программе Visual Prolog 7.5. заданы факты вида parent (a,b): a - родитель для b, и факты вида man©: с - мужчина. Используя эти предикаты, определить:
a) отношение grand_nephew(X,Y): X является внучатым племянником для Y (т.е. X - внук сестры или брата Y).
б) отношение has cousin Kim(X): у человека X есть двоюродный брат или сестра по имени Kim.
Пример кода:
implement main
open core
domains
gender = female(); male().
class facts - familyDB
person : (string Name, gender Gender).
parent : (string Person, string Parent).
class predicates
father : (string Person, string Father) nondeterm anyflow.
clauses
father(Person, Father) :-
parent(Person, Father),
person(Father, male()).
class predicates
grandFather : (string Person, string GrandFather) nondeterm (o,o).
clauses
grandFather(Person, GrandFather) :-
parent(Person, Parent),
father(Parent, GrandFather).
class predicates
ancestor : (string Person, string Ancestor) nondeterm (i,o).
clauses
ancestor(Person, Ancestor) :-
parent(Person, Ancestor).
ancestor(Person, Ancestor) :-
parent(Person, P1),
ancestor(P1, Ancestor).
class predicates
reconsult : (string FileName).
clauses
reconsult(FileName) :-
retractFactDB(familyDB),
file::consult(FileName, familyDB).
clauses
person(“Judith”,female()).
person(“Bill”,male()).
person(“John”,male()).
person(“Pam”,female()).
parent(“John”,“Judith”).
parent(“Bill”,“John”).
parent(“Pam”,“Bill”).
clauses
run():-
console::init(),
stdIO:: write(“\nfather test\n”),
father(X, Y),
stdIO::writef(“% is the father of %\n”,
Y, X),
fail.
run():-
stdIO:: write(“\ngrandfather test\n”),
grandFather(X, Y),
stdIO:: writef(“% is the grandfather of %\n”, Y, X),
fail.
run():-
stdIO:: write(“\n ancestor of Pam test\n”),
X = “Pam”,
ancestor(X, Y),
stdIO:: writef(“% is the ancestor of %\n”, Y, X),
fail.
run():-
stdIO:: write(“End of test\n”),
programControl:: sleep(9000),
succeed().
end implement main
goal
mainExe::run(main::run).
Полностью написать код решения задания
|
1c12443474884ddc2e4e842e33b69972
|
{
"intermediate": 0.39820048213005066,
"beginner": 0.3547653257846832,
"expert": 0.24703410267829895
}
|
40,981
|
write the code to ypdate python i jupyter notebook
|
48a907d7ec2b7b0ffc76667bf3da5fbc
|
{
"intermediate": 0.5026483535766602,
"beginner": 0.18707607686519623,
"expert": 0.3102755844593048
}
|
40,982
|
import requests
import pandas as pd
from time import sleep
# مجموعة لتخزين الأحداث المطبوعة بالفعل لتجنب التكرار
printed_events = set()
def fetch_live_players(printed_events):
fixtures_url = 'https://fantasy.premierleague.com/api/fixtures/'
fixtures_response = requests.get(fixtures_url)
players_url = 'https://fantasy.premierleague.com/api/bootstrap-static/'
players_response = requests.get(players_url)
if fixtures_response.status_code == 200 and players_response.status_code == 200:
fixtures_data = fixtures_response.json()
players_data = players_response.json()
live_fixtures = [fixture for fixture in fixtures_data if not fixture['finished'] and fixture['started']]
players_df = pd.DataFrame(players_data['elements'])
teams_df = pd.DataFrame(players_data['teams'])
if live_fixtures:
for fixture in live_fixtures:
event_id = fixture['event']
live_url = f'https://fantasy.premierleague.com/api/event/{event_id}/live/'
live_response = requests.get(live_url)
if live_response.status_code == 200:
live_data = live_response.json()['elements']
for element in live_data:
element_info = players_df.loc[players_df['id'] == element['id']].iloc[0]
team_info = teams_df.loc[teams_df['id'] == element_info['team']].iloc[0]
player_name = element_info['web_name']
stats = element['stats']
# تحقق من الأهداف
if stats['goals_scored'] > 0:
event_key = f"{player_name}-goal-{stats['goals_scored']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Goal⚽: {player_name}, {team_info['name']}- P: {stats['goals_scored']*6}, Tot: {stats['total_points']}")
# تحقق من الأسيست
if stats['assists'] > 0:
event_key = f"{player_name}-assist-{stats['assists']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Assist🅰️: {player_name}, {team_info['name']}- P: {stats['assists']*3}, Tot: {stats['total_points']}")
# تحقق من البطاقات الصفراء
if stats['yellow_cards'] > 0:
event_key = f"{player_name}-yellow-{stats['yellow_cards']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Yellow Card🟨: #{player_name}, #{team_info['name']}- P: -{stats['yellow_cards']*1}, Tot: {stats['total_points']}")
# تحقق من البطاقات الحمراء
if stats['red_cards'] > 0:
event_key = f"{player_name}-red-{stats['red_cards']}"
if event_key not in printed_events:
printed_events.add(event_key)
print(f"Red Card: {player_name}, {team_info['name']}, Event Points: -{stats['red_cards']*3}, Total Points: {stats['total_points']}")
else:
print('Failed to retrieve data.')
# نقطة بدء البرنامج
def main():
while True:
try:
fetch_live_players(printed_events)
except Exception as e:
print(e) # لدينا الآن الخطأ المطبوع في حال حدوث أي استثناء
sleep(60) # فترة الانتظار بين كل تحقق هي 60 ثانية
if __name__ == "__main__":
main()
اريد تعديل في هذا السكريبت حيث يطبع ويعرض الحدث بمجرد ظهوره بالتوازي مع الوقت الفعلي والحي للحدث
اريد كذلك إضافة عرض حدث التشكيلات حسب الوقت الحي lineup
|
debd007e19a62f2d9ff0cbbdca97ce45
|
{
"intermediate": 0.40353623032569885,
"beginner": 0.3928612172603607,
"expert": 0.20360258221626282
}
|
40,983
|
const username = process.env.WEB_USERNAME || "admin";
const password = process.env.WEB_PASSWORD || "password";
const url = `https://${process.env.PROJECT_DOMAIN}.glitch.me`;
const port = process.env.PORT || 3000;
const express = require("express");
const app = express();
var exec = require("child_process").exec;
const os = require("os");
const { createProxyMiddleware } = require("http-proxy-middleware");
var request = require("request");
var fs = require("fs");
var path = require("path");
const auth = require("basic-auth");
app.get("/", function (req, res) {
res.send("hello world");
});
// 页面访问密码
app.use((req, res, next) => {
const user = auth(req);
if (user && user.name === username && user.pass === password) {
return next();
}
res.set("WWW-Authenticate", 'Basic realm="Node"');
return res.status(401).send();
});
// 获取系统进程表
app.get("/status", function (req, res) {
let cmdStr =
"ps -ef";
exec(cmdStr, function (err, stdout, stderr) {
if (err) {
res.type("html").send("<pre>命令行执行错误:\n" + err + "</pre>");
} else {
res.type("html").send("<pre>获取系统进程表:\n" + stdout + "</pre>");
}
});
});
// 获取系统监听端口
app.get("/listen", function (req, res) {
let cmdStr = "ss -nltp";
exec(cmdStr, function (err, stdout, stderr) {
if (err) {
res.type("html").send("<pre>命令行执行错误:\n" + err + "</pre>");
} else {
res.type("html").send("<pre>获取系统监听端口:\n" + stdout + "</pre>");
}
});
});
//获取节点数据
app.get("/list", function (req, res) {
let cmdStr = "cat list";
exec(cmdStr, function (err, stdout, stderr) {
if (err) {
res.type("html").send("<pre>命令行执行错误:\n" + err + "</pre>");
} else {
res.type("html").send("<pre>节点数据:\n\n" + stdout + "</pre>");
}
});
});
// 获取系统版本、内存信息
app.get("/info", function (req, res) {
let cmdStr = "cat /etc/*release | grep -E ^NAME";
exec(cmdStr, function (err, stdout, stderr) {
if (err) {
res.send("命令行执行错误:" + err);
} else {
res.send(
"命令行执行结果:\n" +
"Linux System:" +
stdout +
"\nRAM:" +
os.totalmem() / 1000 / 1000 +
"MB"
);
}
});
});
//文件系统只读测试
app.get("/test", function (req, res) {
let cmdStr = 'mount | grep " / " | grep "(ro," >/dev/null';
exec(cmdStr, function (error, stdout, stderr) {
if (error !== null) {
res.send("系统权限为---非只读");
} else {
res.send("系统权限为---只读");
}
});
});
// 启动root
app.get("/root", function (req, res) {
let cmdStr = "bash root.sh >/dev/null 2>&1 &";
exec(cmdStr, function (err, stdout, stderr) {
if (err) {
res.send("root权限部署错误:" + err);
} else {
res.send("root权限执行结果:" + "启动成功!");
}
});
});
// keepalive begin
//web保活
function keep_web_alive() {
// 1.请求主页,保持唤醒
exec("curl -m5 " + url, function (err, stdout, stderr) {
if (err) {
console.log("保活-请求主页-命令行执行错误:" + err);
} else {
console.log("保活-请求主页-命令行执行成功,响应报文:" + stdout);
}
});
/* exec("ping -c1 " + url, function (err, stdout, stderr) {
if (err) {
console.log("保活-请求主页-命令行执行错误:" + err);
} else {
console.log("保活-请求主页-命令行执行成功,响应报文:" + stdout);
}
});
*/
// 2.请求服务器进程状态列表,若web没在运行,则调起
exec("pgrep -laf web.js", function (err, stdout, stderr) {
// 1.查后台系统进程,保持唤醒
if (stdout.includes("./web.js -c ./config.json")) {
console.log("web 正在运行");
} else {
//web 未运行,命令行调起
exec(
"chmod +x web.js && ./web.js -c ./config.json >/dev/null 2>&1 &",
function (err, stdout, stderr) {
if (err) {
console.log("保活-调起web-命令行执行错误:" + err);
} else {
console.log("保活-调起web-命令行执行成功!");
}
}
);
}
});
}
setInterval(keep_web_alive, 10 * 1000);
//Argo保活
function keep_argo_alive() {
exec("pgrep -laf cloudflared", function (err, stdout, stderr) {
// 1.查后台系统进程,保持唤醒
if (stdout.includes("./cloudflared tunnel")) {
console.log("Argo 正在运行");
} else {
//Argo 未运行,命令行调起
exec("bash argo.sh 2>&1 &", function (err, stdout, stderr) {
if (err) {
console.log("保活-调起Argo-命令行执行错误:" + err);
} else {
console.log("保活-调起Argo-命令行执行成功!");
}
});
}
});
}
setInterval(keep_argo_alive, 30 * 1000);
//哪吒保活
function keep_nezha_alive() {
exec("pgrep -laf nezha-agent", function (err, stdout, stderr) {
// 1.查后台系统进程,保持唤醒
if (stdout.includes("./nezha-agent")) {
console.log("哪吒正在运行");
} else {
//哪吒未运行,命令行调起
exec("bash nezha.sh 2>&1 &", function (err, stdout, stderr) {
if (err) {
console.log("保活-调起哪吒-命令行执行错误:" + err);
} else {
console.log("保活-调起哪吒-命令行执行成功!");
}
});
}
});
}
setInterval(keep_nezha_alive, 45 * 1000);
// keepalive end
//下载web可执行文件
app.get("/download", function (req, res) {
download_web((err) => {
if (err) {
res.send("下载文件失败");
} else {
res.send("下载文件成功");
}
});
});
app.use(
"/",
createProxyMiddleware({
changeOrigin: true, // 默认false,是否需要改变原始主机头为目标URL
onProxyReq: function onProxyReq(proxyReq, req, res) {},
pathRewrite: {
// 请求中去除/
"^/": "/",
},
target: "http://127.0.0.1:8080/", // 需要跨域处理的请求地址
ws: true, // 是否代理websockets
})
);
//初始化,下载web
function download_web(callback) {
let fileName = "web.js";
let web_url =
"https://github.com/fscarmen2/Argo-X-Container-PaaS/raw/main/files/web.js";
let stream = fs.createWriteStream(path.join("./", fileName));
request(web_url)
.pipe(stream)
.on("close", function (err) {
if (err) {
callback("下载文件失败");
} else {
callback(null);
}
});
}
download_web((err) => {
if (err) {
console.log("初始化-下载web文件失败");
} else {
console.log("初始化-下载web文件成功");
}
});
// 启动核心脚本运行web,哪吒和argo
exec("bash entrypoint.sh", function (err, stdout, stderr) {
if (err) {
console.error(err);
return;
}
console.log(stdout);
});
app.listen(port, () => console.log(`Example app listening on port ${port}!`));
把这段function download_web(callback) {
let fileName = "web.js";
let web_url =
"https://github.com/fscarmen2/Argo-X-Container-PaaS/raw/main/files/web.js";
let stream = fs.createWriteStream(path.join("./", fileName));
request(web_url)
.pipe(stream)
.on("close", function (err) {
if (err) {
callback("下载文件失败");
} else {
callback(null);
}
});
}
内容进行混淆处理
|
e96aa3d8af102e63bad74479493224cb
|
{
"intermediate": 0.2065492868423462,
"beginner": 0.6536137461662292,
"expert": 0.13983701169490814
}
|
40,984
|
Google 608 m3 60u IPv6 0x23d51ba66e3ad9bd 0t0 TCP localhost:7679 (LISTEN)
Google 608 m3 123u IPv4 0x23d51b9cd61d1a1d 0t0 TCP 192.168.10.2:49949->den16s08-in-f10.1e100.net:https (ESTABLISHED)
Google 608 m3 141u IPv4 0x23d51b9cd8c68f75 0t0 TCP 192.168.10.2:56208->den16s08-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 21u IPv4 0x23d51b9cd61c6a1d 0t0 TCP 192.168.10.2:52544->ix-in-f188.1e100.net:5228 (ESTABLISHED)
Google 932 m3 22u IPv4 0x23d51b9cd8c05a1d 0t0 TCP 192.168.10.2:56167->server-108-156-201-62.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 25u IPv4 0x23d51b9cd8c5bf75 0t0 TCP 192.168.10.2:56213->edge.presslabs.net:https (ESTABLISHED)
Google 932 m3 26u IPv4 0x23d51b9cd88efa1d 0t0 TCP 192.168.10.2:56287->74.119.118.149:https (ESTABLISHED)
Google 932 m3 28u IPv4 0x23d51b9cd61daa1d 0t0 TCP 192.168.10.2:55491->edge-star-shv-02-bom2.facebook.com:https (ESTABLISHED)
Google 932 m3 31u IPv4 0x23d51b9cd8bfd4c5 0t0 TCP 192.168.10.2:56133->den16s09-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 32u IPv4 0x23d51b9cd8bd1a1d 0t0 TCP 192.168.10.2:55492->edge-star-shv-02-bom2.facebook.com:https (ESTABLISHED)
Google 932 m3 33u IPv4 0x23d51b9cd8c774c5 0t0 TCP 192.168.10.2:53877->edge-z-p3-shv-01-den2.facebook.com:https (ESTABLISHED)
Google 932 m3 35u IPv4 0x23d51b9cd8be7a1d 0t0 TCP 192.168.10.2:55494->edge-star-shv-02-bom2.facebook.com:https (ESTABLISHED)
Google 932 m3 36u IPv4 0x23d51b9cd8c13a1d 0t0 TCP 192.168.10.2:54675->64.70.201.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 37u IPv4 0x23d51b9cd8c76a1d 0t0 TCP 192.168.10.2:56104->ec2-52-10-248-239.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 39u IPv4 0x23d51b9cd8c02a1d 0t0 TCP 192.168.10.2:56037->1.80.190.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 41u IPv4 0x23d51b9cd8c124c5 0t0 TCP 192.168.10.2:56193->den08s06-in-f1.1e100.net:https (ESTABLISHED)
Google 932 m3 43u IPv4 0x23d51b9cd61b74c5 0t0 TCP 192.168.10.2:54595->32.140.73.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 44u IPv4 0x23d51b9cd61dbf75 0t0 TCP 192.168.10.2:54727->ec2-44-242-13-161.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 45u IPv4 0x23d51b9cd8c94f75 0t0 TCP 192.168.10.2:56106->192-152-130-134.v4.mynextlight.net:https (ESTABLISHED)
Google 932 m3 47u IPv4 0x23d51b9cd8c60f75 0t0 TCP 192.168.10.2:56192->den16s08-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 48u IPv4 0x23d51b9cd8c12f75 0t0 TCP 192.168.10.2:54665->ec2-18-209-191-209.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 50u IPv4 0x23d51b9cd61bca1d 0t0 TCP 192.168.10.2:56034->ec2-34-212-185-237.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 51u IPv4 0x23d51b9cd8cab4c5 0t0 TCP 192.168.10.2:56135->den16s09-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 52u IPv4 0x23d51b9cd8bd24c5 0t0 TCP 192.168.10.2:54709->222.9.211.130.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 53u IPv4 0x23d51b9cd61b4a1d 0t0 TCP 192.168.10.2:54717->edge-dgw-shv-01-den2.facebook.com:https (ESTABLISHED)
Google 932 m3 54u IPv4 0x23d51b9cd61c24c5 0t0 TCP 192.168.10.2:56210->20.125.209.212:https (ESTABLISHED)
Google 932 m3 55u IPv4 0x23d51b9cd8bf24c5 0t0 TCP 192.168.10.2:56190->104.18.32.137:https (ESTABLISHED)
Google 932 m3 56u IPv4 0x23d51b9cd61b3f75 0t0 TCP 192.168.10.2:56118->den08s05-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 57u IPv4 0x23d51b9cd8c684c5 0t0 TCP 192.168.10.2:56188->192-152-130-134.v4.mynextlight.net:https (ESTABLISHED)
Google 932 m3 58u IPv4 0x23d51b9cd88f24c5 0t0 TCP 192.168.10.2:56117->den08s06-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 60u IPv4 0x23d51b9cd8c034c5 0t0 TCP 192.168.10.2:56121->den08s05-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 61u IPv4 0x23d51b9cd8c5b4c5 0t0 TCP 192.168.10.2:56140->104.18.43.226:https (ESTABLISHED)
Google 932 m3 62u IPv4 0x23d51b9cd61bd4c5 0t0 TCP 192.168.10.2:56438->8.43.72.44:https (ESTABLISHED)
Google 932 m3 63u IPv4 0x23d51b9cd8becf75 0t0 TCP 192.168.10.2:56269->20.127.253.7:https (ESTABLISHED)
Google 932 m3 64u IPv4 0x23d51b9cd8ca9f75 0t0 TCP 192.168.10.2:56172->172.64.155.119:https (ESTABLISHED)
Google 932 m3 65u IPv4 0x23d51b9cd8bd0f75 0t0 TCP 192.168.10.2:56267->server-143-204-29-17.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 66u IPv4 0x23d51b9cd61d94c5 0t0 TCP 192.168.10.2:56171->104.18.130.236:https (ESTABLISHED)
Google 932 m3 67u IPv4 0x23d51b9cd8bada1d 0t0 TCP 192.168.10.2:56174->172.64.155.119:https (ESTABLISHED)
Google 932 m3 68u IPv4 0x23d51b9cd61d04c5 0t0 TCP 192.168.10.2:56176->den08s05-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 69u IPv4 0x23d51b9cd61cef75 0t0 TCP 192.168.10.2:56396->den08s06-in-f1.1e100.net:https (ESTABLISHED)
Google 932 m3 70u IPv4 0x23d51b9cd88ec4c5 0t0 TCP 192.168.10.2:56242->22.62.117.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 71u IPv4 0x23d51b9cd88f04c5 0t0 TCP 192.168.10.2:56169->104.18.130.236:https (ESTABLISHED)
Google 932 m3 72u IPv4 0x23d51b9cd8bacf75 0t0 TCP 192.168.10.2:56240->server-143-204-29-94.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 73u IPv4 0x23d51b9cd8bac4c5 0t0 TCP 192.168.10.2:56170->server-18-160-143-16.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 74u IPv4 0x23d51b9cd8c0df75 0t0 TCP 192.168.10.2:56175->server-143-204-34-174.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 76u IPv4 0x23d51b9cd8bcba1d 0t0 TCP 192.168.10.2:56173->den16s08-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 77u IPv4 0x23d51b9cd88f3a1d 0t0 TCP 192.168.10.2:56141->172.64.147.34:https (ESTABLISHED)
Google 932 m3 78u IPv4 0x23d51b9cd8c64f75 0t0 TCP 192.168.10.2:56142->den08s06-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 79u IPv4 0x23d51b9cd8c58a1d 0t0 TCP 192.168.10.2:56143->104.18.131.236:https (ESTABLISHED)
Google 932 m3 80u IPv4 0x23d51b9cd8bcef75 0t0 TCP 192.168.10.2:56177->den16s08-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 81u IPv4 0x23d51b9cd8c6cf75 0t0 TCP 192.168.10.2:56035->ec2-34-212-185-237.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 82u IPv4 0x23d51b9cd61df4c5 0t0 TCP 192.168.10.2:56146->den16s09-in-f8.1e100.net:https (ESTABLISHED)
Google 932 m3 83u IPv4 0x23d51b9cd8c624c5 0t0 TCP 192.168.10.2:56178->server-108-156-201-33.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 84u IPv4 0x23d51b9cd8c5f4c5 0t0 TCP 192.168.10.2:56125->den08s05-in-f22.1e100.net:https (ESTABLISHED)
Google 932 m3 85u IPv4 0x23d51b9cd8bf9f75 0t0 TCP 192.168.10.2:56288->188.165.107.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 86u IPv4 0x23d51b9cd88f74c5 0t0 TCP 192.168.10.2:56148->104.18.131.236:https (ESTABLISHED)
Google 932 m3 87u IPv4 0x23d51b9cd8bfca1d 0t0 TCP 192.168.10.2:56436->ec2-52-9-64-98.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 88u IPv4 0x23d51b9cd61d7f75 0t0 TCP 192.168.10.2:56149->den08s05-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 89u IPv4 0x23d51b9cd8c5d4c5 0t0 TCP 192.168.10.2:56151->den08s06-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 90u IPv4 0x23d51b9cd8c04f75 0t0 TCP 192.168.10.2:56144->151.101.68.193:https (ESTABLISHED)
Google 932 m3 91u IPv4 0x23d51b9cd8cbaa1d 0t0 TCP 192.168.10.2:56412->23.227.151.162:https (ESTABLISHED)
Google 932 m3 92u IPv4 0x23d51b9cd61b54c5 0t0 TCP 192.168.10.2:56194->den16s08-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 93u IPv4 0x23d51b9cd8be9a1d 0t0 TCP 192.168.10.2:56183->den16s09-in-f8.1e100.net:https (ESTABLISHED)
Google 932 m3 94u IPv4 0x23d51b9cd8c544c5 0t0 TCP 192.168.10.2:56200->den08s05-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 95u IPv4 0x23d51b9cd8c0a4c5 0t0 TCP 192.168.10.2:56180->den16s08-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 96u IPv4 0x23d51b9cd88f7f75 0t0 TCP 192.168.10.2:56157->den08s06-in-f1.1e100.net:https (ESTABLISHED)
Google 932 m3 98u IPv4 0x23d51b9cd61cfa1d 0t0 TCP 192.168.10.2:56397->ec2-34-225-78-58.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 99u IPv4 0x23d51b9cd8c9c4c5 0t0 TCP 192.168.10.2:56048->183.113.95.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 100u IPv4 0x23d51b9cd8bfea1d 0t0 TCP 192.168.10.2:56185->den08s06-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 101u IPv4 0x23d51b9cd61b8a1d 0t0 TCP 192.168.10.2:56158->den16s09-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 102u IPv4 0x23d51b9cd61c4a1d 0t0 TCP 192.168.10.2:56181->den16s08-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 103u IPv4 0x23d51b9cd88df4c5 0t0 TCP 192.168.10.2:56153->den16s08-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 104u IPv4 0x23d51b9cd8cb6f75 0t0 TCP 192.168.10.2:56179->server-143-204-34-174.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 106u IPv4 0x23d51b9cd8c69a1d 0t0 TCP 192.168.10.2:56166->den08s06-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 107u IPv4 0x23d51b9cd8cadf75 0t0 TCP 192.168.10.2:56195->172.66.42.209:https (ESTABLISHED)
Google 932 m3 108u IPv4 0x23d51b9cd8c754c5 0t0 TCP 192.168.10.2:56159->den08s06-in-f1.1e100.net:https (ESTABLISHED)
Google 932 m3 110u IPv4 0x23d51b9cd8c6a4c5 0t0 TCP 192.168.10.2:56255->ec2-35-84-163-233.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 111u IPv4 0x23d51b9cd8c6e4c5 0t0 TCP 192.168.10.2:56211->a-0001.a-msedge.net:https (ESTABLISHED)
Google 932 m3 112u IPv4 0x23d51b9cd8bee4c5 0t0 TCP 192.168.10.2:56165->den08s05-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 113u IPv4 0x23d51b9cd8baba1d 0t0 TCP 192.168.10.2:56163->den16s09-in-f6.1e100.net:https (ESTABLISHED)
Google 932 m3 114u IPv4 0x23d51b9cd8c5ca1d 0t0 TCP 192.168.10.2:56201->ny-in-f156.1e100.net:https (ESTABLISHED)
Google 932 m3 115u IPv4 0x23d51b9cd8cb3a1d 0t0 TCP 192.168.10.2:56199->den08s06-in-f3.1e100.net:https (ESTABLISHED)
Google 932 m3 116u IPv4 0x23d51b9cd88f2f75 0t0 TCP 192.168.10.2:56402->243.101.201.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 118u IPv4 0x23d51b9cd88f6a1d 0t0 TCP 192.168.10.2:56196->den16s09-in-f8.1e100.net:https (ESTABLISHED)
Google 932 m3 120u IPv4 0x23d51b9cd8bc8a1d 0t0 TCP 192.168.10.2:56209->ec2-52-38-23-71.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 121u IPv4 0x23d51b9cd61b5f75 0t0 TCP 192.168.10.2:56207->den16s09-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 122u IPv4 0x23d51b9cd8c62f75 0t0 TCP 192.168.10.2:56202->13.107.246.66:https (ESTABLISHED)
Google 932 m3 123u IPv4 0x23d51b9cd8be44c5 0t0 TCP 192.168.10.2:56214->edge.presslabs.net:https (ESTABLISHED)
Google 932 m3 124u IPv4 0x23d51b9cd8bdcf75 0t0 TCP 192.168.10.2:56204->104.18.231.83:https (ESTABLISHED)
Google 932 m3 126u IPv4 0x23d51b9cd8c5ea1d 0t0 TCP 192.168.10.2:56439->prebid.lax1.adnexus.net:https (ESTABLISHED)
Google 932 m3 127u IPv4 0x23d51b9cd8bb8a1d 0t0 TCP 192.168.10.2:56197->den16s09-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 128u IPv4 0x23d51b9cd8bf0f75 0t0 TCP 192.168.10.2:56216->server-143-204-29-52.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 129u IPv4 0x23d51b9cd8cb2f75 0t0 TCP 192.168.10.2:56217->server-143-204-29-52.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 130u IPv4 0x23d51b9cd8c55f75 0t0 TCP 192.168.10.2:56218->edge.presslabs.net:https (ESTABLISHED)
Google 932 m3 131u IPv4 0x23d51b9cd88e8f75 0t0 TCP 192.168.10.2:56231->a184-30-80-56.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 133u IPv4 0x23d51b9cd88fca1d 0t0 TCP 192.168.10.2:56226->den08s06-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 134u IPv4 0x23d51b9cd61bdf75 0t0 TCP 192.168.10.2:56228->151.101.68.157:https (ESTABLISHED)
Google 932 m3 135u IPv4 0x23d51b9cd88dff75 0t0 TCP 192.168.10.2:56229->den16s09-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 138u IPv4 0x23d51b9cd8bcc4c5 0t0 TCP 192.168.10.2:56232->a12b7a488abeaa9e4.awsglobalaccelerator.com:https (ESTABLISHED)
Google 932 m3 139u IPv4 0x23d51b9cd8befa1d 0t0 TCP 192.168.10.2:56224->192.0.76.3:https (ESTABLISHED)
Google 932 m3 140u IPv4 0x23d51b9cd8cb1a1d 0t0 TCP 192.168.10.2:55542->149.186.117.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 141u IPv4 0x23d51b9cd8bb5a1d 0t0 TCP 192.168.10.2:56233->den16s08-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 142u IPv4 0x23d51b9cd8bbf4c5 0t0 TCP 192.168.10.2:56244->xx-fbcdn-shv-01-den2.fbcdn.net:https (ESTABLISHED)
Google 932 m3 143u IPv4 0x23d51b9cd8c664c5 0t0 TCP 192.168.10.2:56225->den16s09-in-f8.1e100.net:https (ESTABLISHED)
Google 932 m3 147u IPv4 0x23d51b9cd8bb94c5 0t0 TCP 192.168.10.2:56236->8.39.36.196:https (ESTABLISHED)
Google 932 m3 149u IPv4 0x23d51b9cd8bb4f75 0t0 TCP 192.168.10.2:56234->den08s05-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 150u IPv4 0x23d51b9cd8bfb4c5 0t0 TCP 192.168.10.2:56235->ny-in-f157.1e100.net:https (ESTABLISHED)
Google 932 m3 151u IPv4 0x23d51b9cd8c6ff75 0t0 TCP 192.168.10.2:56237->den16s08-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 152u IPv4 0x23d51b9cd61baa1d 0t0 TCP 192.168.10.2:56243->den08s06-in-f8.1e100.net:https (ESTABLISHED)
Google 932 m3 153u IPv4 0x23d51b9cd8c63a1d 0t0 TCP 192.168.10.2:56203->server-65-8-234-2.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 154u IPv4 0x23d51b9cd8c75f75 0t0 TCP 192.168.10.2:56247->den08s06-in-f8.1e100.net:https (ESTABLISHED)
Google 932 m3 156u IPv4 0x23d51b9cd8bd8a1d 0t0 TCP 192.168.10.2:56238->a173-223-109-84.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 157u IPv4 0x23d51b9cd8bc5f75 0t0 TCP 192.168.10.2:56239->edge-star-mini-shv-01-den2.facebook.com:https (ESTABLISHED)
Google 932 m3 158u IPv4 0x23d51b9cd8c67a1d 0t0 TCP 192.168.10.2:56264->ec2-3-141-223-70.us-east-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 159u IPv4 0x23d51b9cd8c65a1d 0t0 TCP 192.168.10.2:56248->172.67.10.198:https (ESTABLISHED)
Google 932 m3 160u IPv4 0x23d51b9cd8bec4c5 0t0 TCP 192.168.10.2:56249->a23-204-161-157.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 161u IPv4 0x23d51b9cd8c56a1d 0t0 TCP 192.168.10.2:56285->server-108-156-201-15.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 162u IPv4 0x23d51b9cd8be6f75 0t0 TCP 192.168.10.2:56246->xx-fbcdn-shv-01-den2.fbcdn.net:https (ESTABLISHED)
Google 932 m3 164u IPv4 0x23d51b9cd88f0f75 0t0 TCP 192.168.10.2:55746->249.195.120.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 166u IPv4 0x23d51b9cd61d74c5 0t0 TCP 192.168.10.2:56250->241.155.149.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 167u IPv4 0x23d51b9cd8c52f75 0t0 TCP 192.168.10.2:56251->241.155.149.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 168u IPv4 0x23d51b9cd8c53a1d 0t0 TCP 192.168.10.2:56252->151.101.68.157:https (ESTABLISHED)
Google 932 m3 169u IPv4 0x23d51b9cd8900f75 0t0 TCP 192.168.10.2:56253->151.101.192.84:https (ESTABLISHED)
Google 932 m3 170u IPv4 0x23d51b9cd61c0f75 0t0 TCP 192.168.10.2:56265->server-108-156-201-59.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 172u IPv4 0x23d51b9cd61c1a1d 0t0 TCP 192.168.10.2:56266->server-18-154-101-38.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 173u IPv4 0x23d51b9cd8c73f75 0t0 TCP 192.168.10.2:56398->20.40.202.2:https (ESTABLISHED)
Google 932 m3 175u IPv4 0x23d51b9cd8c0f4c5 0t0 TCP 192.168.10.2:56271->a23-64-115-140.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 176u IPv4 0x23d51b9cd8c5aa1d 0t0 TCP 192.168.10.2:56259->server-65-8-243-18.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 177u IPv4 0x23d51b9cd8beda1d 0t0 TCP 192.168.10.2:56268->104.244.42.8:https (ESTABLISHED)
Google 932 m3 178u IPv4 0x23d51b9cd8bbea1d 0t0 TCP 192.168.10.2:56284->server-18-160-143-63.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 179u IPv4 0x23d51b9cd8cb84c5 0t0 TCP 192.168.10.2:56278->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 180u IPv4 0x23d51b9cd61d8a1d 0t0 TCP 192.168.10.2:56258->ec2-52-89-119-77.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 182u IPv4 0x23d51b9cd8c0ea1d 0t0 TCP 192.168.10.2:56270->ec2-54-183-94-204.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 184u IPv4 0x23d51b9cd8bcaf75 0t0 TCP 192.168.10.2:56272->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 185u IPv4 0x23d51b9cd8bb7f75 0t0 TCP 192.168.10.2:56273->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 186u IPv4 0x23d51b9cd8baaf75 0t0 TCP 192.168.10.2:56261->ingress-01-pub-prod-sv4.vip.dailymotion.com:https (ESTABLISHED)
Google 932 m3 187u IPv4 0x23d51b9cd8901a1d 0t0 TCP 192.168.10.2:56254->den16s08-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 188u IPv4 0x23d51b9cd88f1a1d 0t0 TCP 192.168.10.2:56256->a23-203-16-245.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 189u IPv4 0x23d51b9cd8cb7a1d 0t0 TCP 192.168.10.2:56257->a104-68-249-15.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 190u IPv4 0x23d51b9cd8bf64c5 0t0 TCP 192.168.10.2:56429->199.250.160.129:https (ESTABLISHED)
Google 932 m3 191u IPv4 0x23d51b9cd8c59f75 0t0 TCP 192.168.10.2:56260->den16s08-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 193u IPv4 0x23d51b9cd61b94c5 0t0 TCP 192.168.10.2:56262->ec2-3-223-101-77.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 194u IPv4 0x23d51b9cd8c574c5 0t0 TCP 192.168.10.2:56286->ec2-35-84-163-233.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 195u IPv4 0x23d51b9cd8c57f75 0t0 TCP 192.168.10.2:56275->ec2-52-34-82-134.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 196u IPv4 0x23d51b9cd61b7f75 0t0 TCP 192.168.10.2:56263->104.244.42.8:https (ESTABLISHED)
Google 932 m3 197u IPv4 0x23d51b9cd8c644c5 0t0 TCP 192.168.10.2:56276->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 198u IPv4 0x23d51b9cd8bbdf75 0t0 TCP 192.168.10.2:56283->ec2-54-70-27-34.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 199u IPv4 0x23d51b9cd8be8f75 0t0 TCP 192.168.10.2:56279->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 201u IPv4 0x23d51b9cd8bea4c5 0t0 TCP 192.168.10.2:56280->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 202u IPv4 0x23d51b9cd8bf2f75 0t0 TCP 192.168.10.2:56400->a23-204-161-157.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 203u IPv4 0x23d51b9cd88e9a1d 0t0 TCP 192.168.10.2:56281->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 204u IPv4 0x23d51b9cd88ea4c5 0t0 TCP 192.168.10.2:56282->server-18-154-101-86.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 205u IPv4 0x23d51b9cd88dea1d 0t0 TCP 192.168.10.2:56294->server-18-154-101-86.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 207u IPv4 0x23d51b9cd8c10f75 0t0 TCP 192.168.10.2:56393->152.199.5.228:https (ESTABLISHED)
Google 932 m3 208u IPv4 0x23d51b9cd61dd4c5 0t0 TCP 192.168.10.2:56299->den16s09-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 209u IPv4 0x23d51b9cd88e14c5 0t0 TCP 192.168.10.2:56290->ec2-44-232-10-218.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 211u IPv4 0x23d51b9cd8bd3a1d 0t0 TCP 192.168.10.2:56304->ec2-54-70-182-119.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 212u IPv4 0x23d51b9cd8beaf75 0t0 TCP 192.168.10.2:56300->ec2-44-197-62-77.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 213u IPv4 0x23d51b9cd8beba1d 0t0 TCP 192.168.10.2:56301->ec2-52-52-154-39.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 214u IPv4 0x23d51b9cd8cb24c5 0t0 TCP 192.168.10.2:56302->server-18-160-143-113.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 215u IPv4 0x23d51b9cd8c714c5 0t0 TCP 192.168.10.2:56297->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 216u IPv4 0x23d51b9cd8bfaa1d 0t0 TCP 192.168.10.2:56289->ec2-54-241-135-67.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 217u IPv4 0x23d51b9cd88e0a1d 0t0 TCP 192.168.10.2:56291->51.193.244.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 218u IPv4 0x23d51b9cd8bf04c5 0t0 TCP 192.168.10.2:56292->ec2-54-209-16-241.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 219u IPv4 0x23d51b9cd61d0f75 0t0 TCP 192.168.10.2:56293->ec2-54-193-80-202.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 221u IPv4 0x23d51b9cd88fd4c5 0t0 TCP 192.168.10.2:56324->a23-222-9-56.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 222u IPv4 0x23d51b9cd8c11a1d 0t0 TCP 192.168.10.2:56394->ec2-34-225-78-58.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 223u IPv4 0x23d51b9cd61d9f75 0t0 TCP 192.168.10.2:56306->151.101.2.114:https (ESTABLISHED)
Google 932 m3 224u IPv4 0x23d51b9cd88f8a1d 0t0 TCP 192.168.10.2:56310->104.26.12.18:https (ESTABLISHED)
Google 932 m3 225u IPv4 0x23d51b9cd8c70a1d 0t0 TCP 192.168.10.2:56296->ec2-54-70-182-119.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 226u IPv4 0x23d51b9cd88f5f75 0t0 TCP 192.168.10.2:56401->server-108-156-201-44.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 227u IPv4 0x23d51b9cd61dca1d 0t0 TCP 192.168.10.2:56298->74.119.118.149:https (ESTABLISHED)
Google 932 m3 228u IPv4 0x23d51b9cd88f94c5 0t0 TCP 192.168.10.2:56311->8.39.36.196:https (ESTABLISHED)
Google 932 m3 229u IPv4 0x23d51b9cd88ddf75 0t0 TCP 192.168.10.2:56295->8.39.36.142:https (ESTABLISHED)
Google 932 m3 231u IPv4 0x23d51b9cd8bb64c5 0t0 TCP 192.168.10.2:56312->8.39.36.196:https (ESTABLISHED)
Google 932 m3 232u IPv4 0x23d51b9cd8bf7f75 0t0 TCP 192.168.10.2:56433->bidder.da1.vip.prod.criteo.com:https (ESTABLISHED)
Google 932 m3 233u IPv4 0x23d51b9cd8cb0f75 0t0 TCP 192.168.10.2:56303->ec2-18-206-23-63.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 234u IPv4 0x23d51b9cd8cbb4c5 0t0 TCP 192.168.10.2:56309->ec2-52-86-42-47.compute-1.amazonaws.com:https (ESTABLISHED)
Google 932 m3 237u IPv4 0x23d51b9cd89024c5 0t0 TCP 192.168.10.2:56445->den16s09-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 238u IPv4 0x23d51b9cd8bde4c5 0t0 TCP 192.168.10.2:56313->8.39.36.196:https (ESTABLISHED)
Google 932 m3 239u IPv4 0x23d51b9cd8bdda1d 0t0 TCP 192.168.10.2:56314->8.39.36.196:https (ESTABLISHED)
Google 932 m3 240u IPv4 0x23d51b9cd61c5f75 0t0 TCP 192.168.10.2:56315->a23-222-9-56.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 241u IPv4 0x23d51b9cd61c74c5 0t0 TCP 192.168.10.2:56316->ec2-54-241-77-153.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 242u IPv4 0x23d51b9cd8c5df75 0t0 TCP 192.168.10.2:56317->bidder.da1.vip.prod.criteo.com:https (ESTABLISHED)
Google 932 m3 243u IPv4 0x23d51b9cd8c95a1d 0t0 TCP 192.168.10.2:56435->ec2-3-68-4-162.eu-central-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 244u IPv4 0x23d51b9cd8c08f75 0t0 TCP 192.168.10.2:56319->8.39.36.196:https (ESTABLISHED)
Google 932 m3 245u IPv4 0x23d51b9cd8bf4f75 0t0 TCP 192.168.10.2:56386->199.250.160.129:https (ESTABLISHED)
Google 932 m3 246u IPv4 0x23d51b9cd61db4c5 0t0 TCP 192.168.10.2:56334->a23-222-8-47.deploy.static.akamaitechnologies.com:https (ESTABLISHED)
Google 932 m3 247u IPv4 0x23d51b9cd8c06f75 0t0 TCP 192.168.10.2:56387->ip-216-169-156-30.rtbhouse.net:https (ESTABLISHED)
Google 932 m3 248u IPv4 0x23d51b9cd8be3a1d 0t0 TCP 192.168.10.2:56403->243.101.201.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 249u IPv4 0x23d51b9cd8bc74c5 0t0 TCP 192.168.10.2:56323->8.39.36.196:https (ESTABLISHED)
Google 932 m3 250u IPv4 0x23d51b9cd8be84c5 0t0 TCP 192.168.10.2:56404->den16s09-in-f10.1e100.net:https (ESTABLISHED)
Google 932 m3 251u IPv4 0x23d51b9cd88fbf75 0t0 TCP 192.168.10.2:56440->prebid.lax1.adnexus.net:https (ESTABLISHED)
Google 932 m3 252u IPv4 0x23d51b9cd8caea1d 0t0 TCP 192.168.10.2:56405->den08s06-in-f14.1e100.net:https (ESTABLISHED)
Google 932 m3 253u IPv4 0x23d51b9cd8bc94c5 0t0 TCP 192.168.10.2:56427->visit.keznews.com:https (ESTABLISHED)
Google 932 m3 254u IPv4 0x23d51b9cd61ccf75 0t0 TCP 192.168.10.2:56414->den08s05-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 255u IPv4 0x23d51b9cd61c3f75 0t0 TCP 192.168.10.2:56395->192-152-130-134.v4.mynextlight.net:https (ESTABLISHED)
Google 932 m3 256u IPv4 0x23d51b9cd61d5f75 0t0 TCP 192.168.10.2:56409->217.45.241.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 257u IPv4 0x23d51b9cd8bc3f75 0t0 TCP 192.168.10.2:55598->218.64.98.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 258u IPv4 0x23d51b9cd8bc7f75 0t0 TCP 192.168.10.2:56389->209.192.253.60:https (ESTABLISHED)
Google 932 m3 260u IPv4 0x23d51b9cd8bd04c5 0t0 TCP 192.168.10.2:56407->243.101.201.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 261u IPv4 0x23d51b9cd8bc4a1d 0t0 TCP 192.168.10.2:56330->172.64.151.101:https (ESTABLISHED)
Google 932 m3 262u IPv4 0x23d51b9cd8bd94c5 0t0 TCP 192.168.10.2:56360->8.43.72.32:https (ESTABLISHED)
Google 932 m3 264u IPv4 0x23d51b9cd8bf5a1d 0t0 TCP 192.168.10.2:56430->server-18-154-101-26.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 265u IPv4 0x23d51b9cd8bcfa1d 0t0 TCP 192.168.10.2:56340->dns.google:https (ESTABLISHED)
Google 932 m3 266u IPv4 0x23d51b9cd61b9f75 0t0 TCP 192.168.10.2:56441->prebid.lax1.adnexus.net:https (ESTABLISHED)
Google 932 m3 267u IPv4 0x23d51b9cd61bea1d 0t0 TCP 192.168.10.2:56431->199.250.162.129:https (ESTABLISHED)
Google 932 m3 268u IPv4 0x23d51b9cd8bc54c5 0t0 TCP 192.168.10.2:56442->prebid.lax1.adnexus.net:https (ESTABLISHED)
Google 932 m3 269u IPv4 0x23d51b9cd8caf4c5 0t0 TCP 192.168.10.2:56443->prebid.lax1.adnexus.net:https (ESTABLISHED)
Google 932 m3 270u IPv4 0x23d51b9cd8c594c5 0t0 TCP 192.168.10.2:56444->prebid.lax1.adnexus.net:https (ESTABLISHED)
Google 932 m3 271u IPv4 0x23d51b9cd8be2f75 0t0 TCP 192.168.10.2:56344->ec2-52-53-97-49.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 272u IPv4 0x23d51b9cd8bd7f75 0t0 TCP 192.168.10.2:56361->209.192.253.60:https (ESTABLISHED)
Google 932 m3 273u IPv4 0x23d51b9cd8bf8a1d 0t0 TCP 192.168.10.2:55594->103.252.227.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 274u IPv4 0x23d51b9cd61d6a1d 0t0 TCP 192.168.10.2:56408->den16s08-in-f2.1e100.net:https (ESTABLISHED)
Google 932 m3 276u IPv4 0x23d51b9cd88f44c5 0t0 TCP 192.168.10.2:56346->103.252.227.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 277u IPv4 0x23d51b9cd61bb4c5 0t0 TCP 192.168.10.2:56446->den16s09-in-f6.1e100.net:https (ESTABLISHED)
Google 932 m3 281u IPv4 0x23d51b9cd8bae4c5 0t0 TCP 192.168.10.2:55799->ip-185-184-8-90.rtbhouse.net:https (ESTABLISHED)
Google 932 m3 282u IPv4 0x23d51b9cd8bf44c5 0t0 TCP 192.168.10.2:56350->ec2-35-161-232-83.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 283u IPv4 0x23d51b9cd8bfdf75 0t0 TCP 192.168.10.2:56374->ec2-54-201-39-127.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 284u IPv4 0x23d51b9cd8c01f75 0t0 TCP 192.168.10.2:56369->199.250.160.129:https (ESTABLISHED)
Google 932 m3 285u IPv4 0x23d51b9cd61cda1d 0t0 TCP 192.168.10.2:56410->ec2-52-9-35-116.us-west-1.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 286u IPv4 0x23d51b9cd88eba1d 0t0 TCP 192.168.10.2:56370->199.250.160.129:https (ESTABLISHED)
Google 932 m3 287u IPv4 0x23d51b9cd88eaf75 0t0 TCP 192.168.10.2:56371->199.250.160.129:https (ESTABLISHED)
Google 932 m3 288u IPv4 0x23d51b9cd61c54c5 0t0 TCP 192.168.10.2:56356->204.237.133.116:https (ESTABLISHED)
Google 932 m3 289u IPv4 0x23d51b9cd8c9ba1d 0t0 TCP 192.168.10.2:56372->199.250.160.129:https (ESTABLISHED)
Google 932 m3 290u IPv4 0x23d51b9cd8bd44c5 0t0 TCP 192.168.10.2:56411->ec2-54-188-182-95.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 292u IPv4 0x23d51b9cd61b6a1d 0t0 TCP 192.168.10.2:56364->151.101.2.114:https (ESTABLISHED)
Google 932 m3 295u IPv4 0x23d51b9cd8c07a1d 0t0 TCP 192.168.10.2:56447->30.90.190.35.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 296u IPv4 0x23d51b9cd61bf4c5 0t0 TCP 192.168.10.2:56368->579.bm-nginx-loadbalancer.mgmt.nym2.adnexus.net:https (ESTABLISHED)
Google 932 m3 299u IPv4 0x23d51b9cd8bc0a1d 0t0 TCP 192.168.10.2:56379->server-65-8-243-84.den52.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 300u IPv4 0x23d51b9cd8bbff75 0t0 TCP 192.168.10.2:56380->server-18-154-110-39.ord58.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 301u IPv4 0x23d51b9cd8bf94c5 0t0 TCP 192.168.10.2:56432->visit.keznews.com:https (ESTABLISHED)
Google 932 m3 302u IPv4 0x23d51b9cd8caaa1d 0t0 TCP 192.168.10.2:56416->ec2-44-241-216-74.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
Google 932 m3 303u IPv4 0x23d51b9cd88fb4c5 0t0 TCP 192.168.10.2:56383->ip-216-169-156-30.rtbhouse.net:https (ESTABLISHED)
Google 932 m3 306u IPv4 0x23d51b9cd8bc6a1d 0t0 TCP 192.168.10.2:56419->server-13-33-252-86.den50.r.cloudfront.net:https (ESTABLISHED)
Google 932 m3 309u IPv4 0x23d51b9cd88faa1d 0t0 TCP 192.168.10.2:56422->199.250.160.129:https (ESTABLISHED)
Google 932 m3 311u IPv4 0x23d51b9cd8c09a1d 0t0 TCP 192.168.10.2:56424->211.87.96.34.bc.googleusercontent.com:https (ESTABLISHED)
Google 932 m3 313u IPv4 0x23d51b9cd8c9af75 0t0 TCP 192.168.10.2:56425->199.250.162.129:https (ESTABLISHED)
Google 932 m3 314u IPv4 0x23d51b9cd8bff4c5 0t0 TCP 192.168.10.2:56426->ec2-54-191-6-172.us-west-2.compute.amazonaws.com:https (ESTABLISHED)
I want all the ip adresses i am connected to
|
98356f9816b4f52a67958496a1ad49e4
|
{
"intermediate": 0.2912687361240387,
"beginner": 0.3998788595199585,
"expert": 0.30885231494903564
}
|
40,985
|
i want a script to show me the info on the ip adresses listed
|
cbe337cd25edb631664ba17f97ee34d9
|
{
"intermediate": 0.4577818810939789,
"beginner": 0.18511879444122314,
"expert": 0.3570992648601532
}
|
40,986
|
Write a minimal python script that converts the following API call from javascript:
params={
'prompt': "Hi",
'nprompt': "",
"steps": 44,
'gd': 5,
"style": "PHOTOGRAPHIC",
"width": 1024,
"height":1024,
"alchemy":true,
"pr":true
};
try {
const response = await fetch('https://l-d83c.onrender.com/leonardo', {
method: 'POST',
headers: {
'Content-Type': 'application/json',
'Connection': 'keep-alive',
},
body: JSON.stringify(params)
});
if (response.status === 429) {
pi.innerText = 'you have reached your limit of 50 generation per hour, you will get 50 more image generaions after an hour!!'
}
const data = await response.json();
localStorage.setItem("cookie", data.token);
localStorage.setItem("sub", data.sub);
localStorage.setItem("u", data.u);
console.log(data.result)
const div = document.getElementById('imshow')
const img = document.createElement('img');
img.id = "hehe"
pi.innerHTML = ''
pi.innerText = ''
img.src = data.result;
div.appendChild(img);
btn.disabled = false
isimg = true
} catch (error) {
btn.disabled = false
console.error('An error occurred:', error);
pi.innerText = 'An error occurred, try changing your prompt';
}
}
await Promise.all([generate()]);
|
b1ac668f99ff2d250bf1ec8d6ff077a5
|
{
"intermediate": 0.26426905393600464,
"beginner": 0.5834757685661316,
"expert": 0.15225516259670258
}
|
40,987
|
Make a simple gradio app UI for the following image generation API:
import requests
# Define the API endpoint and parameters
api_url = "https://l-d83c.onrender.com/leonardo"
payload = {
"prompt": "Latina, Photogenic, Golden Hour Lighting",
"nprompt": "",
"steps": 44,
"gd": 5,
"width": 832,
"height": 1216,
"alchemy": True,
"pr": True
}
# Makes a POST request to the API
try:
headers = {
"accept": "application/json",
"content-type": "application/json"
}
response = requests.post(api_url, json=payload, headers=headers)
# Check if we hit the rate limit
if response.status_code == 429:
print("You have reached your limit of 50 generation per hour, you will get 50 more image generations after an hour!!")
# Parse the JSON response
data = response.json()
# Print result URL
image_result = data.get("result")
if image_result:
print(f"Image URL: {image_result}")
except Exception as e:
print("An error occurred:", e)
|
a046e3fb50abe35354895cde21410837
|
{
"intermediate": 0.6331021785736084,
"beginner": 0.2368667721748352,
"expert": 0.130031019449234
}
|
40,988
|
what do you recommend as a font for my LaTeX resume?
|
c2e48c80a2d9ea9abc2f2adc14b72108
|
{
"intermediate": 0.3936966359615326,
"beginner": 0.3581515848636627,
"expert": 0.24815179407596588
}
|
40,989
|
what font do you recommend for my XeTeX Dev resume?
|
a2967badd7526ff607604c98a2d92ef5
|
{
"intermediate": 0.34471395611763,
"beginner": 0.31634315848350525,
"expert": 0.33894288539886475
}
|
40,990
|
Explain, in terms of types of elements involved, the difference between a covalent bond and an ionic bond.
Covalent molecules can be classified as polar or nonpolar. If polar substances dissolve in water and nonpolar substances do not, which covalent molecules on the table above are polar?
Look at the substances in your data table that you identified as having ionic bonds. What characteristics do these ionic compounds have in common that caused you to identify them as having ionic bonds?
Determine the electronegativity difference between sodium and chlorine AND draw the Lewis Dot Diagram of Calcium Fluoride (the compound that forms between calcium and fluoride).
Determine the electronegativity difference between sodium and chlorine AND draw the Lewis Dot Diagram of Potassium Iodide (the compound that forms between potassium and iodide).
Determine the electronegativity difference between potassium and oxygen AND draw the Lewis Dot Diagram of Sodium Sulfide (the compound that forms between sodium and sulfur).
Determine the electronegativity difference between aluminum and sulfur AND draw the Lewis Dot Diagram of Magnesium Nitride (the compound that forms between magnesium and nitrogen).
Determine the electronegativity difference between calcium and oxygen AND draw the Lewis Dot Diagram of Aluminum Nitride (the compound that forms between aluminum and nitrogen).
Determine the electronegativity difference between aluminum and sulfur AND draw the Lewis Dot Diagram of Aluminum Oxide (the compound that forms between aluminum and oxygen).
|
f02aeaba565128156fe66f2c650f9978
|
{
"intermediate": 0.2610419988632202,
"beginner": 0.41141554713249207,
"expert": 0.3275424540042877
}
|
40,991
|
I am making a device for anosmia patient to detect milk spoilage, and fruit spoilage So i buy some components for that which are Raspberry pi 3 model B, Arduino Uno R3, Gas sensors: MQ135, MQ7, MQ9, MQ3, DHT-11, and
when patient place the device on to the milk so the device should be capable to detect that the milk is fresh or spoil.
if i collect 2 datasets one is for milk fresh/spoilage, and other dataset is for fruit fresh/spoiled and in each dataset there are both 1 and 0 records .......so i want to make one model for these 2 dataset ...rather than making two models .....also i donot want to combine these two datasets to make one dataset.
Note: i want to use neural network model
|
47f98617a6a75121444cf75d9f27500b
|
{
"intermediate": 0.19714903831481934,
"beginner": 0.15285052359104156,
"expert": 0.6500004529953003
}
|
40,992
|
let lineCoords: [CLLocationCoordinate2D]
for marker in markers {
lineCoords.append(CLLocationCoordinate2D(latitude: marker.latitude, longitude: marker.longitude))
}
/Users/eboschert/Desktop/Keepers/Programming/TestApp/TestApp/MapViewController.swift:36:28: Constant 'lineCoords' passed by reference before being initialized
|
291b66f0899f220456f1ae0451b815b1
|
{
"intermediate": 0.46669694781303406,
"beginner": 0.31298398971557617,
"expert": 0.22031903266906738
}
|
40,993
|
what free font do you recommend for my XeTeX Dev resume?
|
74e80abf1269ff953ced9863e22bf267
|
{
"intermediate": 0.3609907627105713,
"beginner": 0.3150288164615631,
"expert": 0.3239804804325104
}
|
40,994
|
questo codice di seguito indicato da questo errore:
Traceback (most recent call last):
File "C:\Users\Utente\Desktop\Cassetto fiscale\Cassetto_con_opzioni_modifica.py", line 322, in <module>
href = link.get_attribute('href')
File "C:\Users\Utente\AppData\Local\Programs\Python\Python37\lib\site-packages\selenium\webdriver\remote\webelement.py", line 141, in get_attribute
self, name)
File "C:\Users\Utente\AppData\Local\Programs\Python\Python37\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 636, in execute_script
'args': converted_args})['value']
File "C:\Users\Utente\AppData\Local\Programs\Python\Python37\lib\site-packages\selenium\webdriver\remote\webdriver.py", line 321, in execute
self.error_handler.check_response(response)
File "C:\Users\Utente\AppData\Local\Programs\Python\Python37\lib\site-packages\selenium\webdriver\remote\errorhandler.py", line 242, in check_response
raise exception_class(message, screen, stacktrace)
selenium.common.exceptions.StaleElementReferenceException: Message: stale element reference: element is not attached to the page document
(Session info: chrome=109.0.5414.168)
>>>
# Import the required modules
from selenium import webdriver
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.common.by import By
from selenium.webdriver.common.action_chains import ActionChains
from selenium.webdriver.support import expected_conditions
from selenium.webdriver.support.wait import WebDriverWait
from selenium.webdriver.common.keys import Keys
from selenium.webdriver.common.desired_capabilities import DesiredCapabilities
from selenium.webdriver.support.ui import WebDriverWait
from selenium.common.exceptions import WebDriverException
from selenium.common import exceptions
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.chrome.options import Options
import re
import requests
import os
import time
import json
from bs4 import BeautifulSoup
# Main Function
if __name__ == "__main__":
download_folder = r'C:\Users\Utente\Desktop\Cassetto fiscale'
# Enable Performance Logging of Chrome.
desired_capabilities = DesiredCapabilities.CHROME
desired_capabilities["goog:loggingPrefs"] = {"performance": "ALL"}
# Create the webdriver object and pass the arguments
options = webdriver.ChromeOptions()
options.add_experimental_option('prefs', {
'download.default_directory': download_folder,
#'download.prompt_for_download': False,
#'download.directory_upgrade': True,
# 'plugins.plugins_disabled': ['Chrome PDF Viewer'],
# 'profile.default_content_settings.popups': 0,
# 'profile.content_settings.exceptions.automatic_downloads.*.setting': 1,
# 'safebrowsing.enabled': True
})
# Chrome will start in Headless mode
#options.add_argument('headless')
# Ignores any certificate errors if there is any
options.add_argument("--ignore-certificate-errors")
chromedriver_path = r"C:\Users\Utente\Desktop\PYTHON\chromedrive\chromedriver.exe"
# Startup the chrome webdriver with executable path and
# pass the chrome options and desired capabilities as
# parameters.
driver = webdriver.Chrome(chromedriver_path, desired_capabilities=desired_capabilities, options=options)
#driver = webdriver.Chrome(os.path.join(os.getcwd(),"Downloads","chromedriver"),options=options)
# Send a request to the website and let it load
driver.get("https://portale.agenziaentrate.gov.it/PortaleWeb/home")
cookies = driver.get_cookies()
for cookie in cookies:
print(cookie)
time.sleep(10)
element=driver.find_element(By.LINK_TEXT, "Credenziali").click()
element=driver.find_element_by_id ("username-fo-ent")
element.send_keys('')
element=driver.find_element_by_id("pin-fo-ent")
element.send_keys("")
element=driver.find_element_by_id("password-fo-ent")
element.send_keys("")
#driver.find_element_by_id( "page-footer").click()
element=driver.find_element(By.CSS_SELECTOR, ".px-md-5 > .btn-accedi").click()
cookies = driver.get_cookies()
for cookie in cookies:
print(cookie)
#Sleeps for 10 seconds
time.sleep(10)
# Gets all the logs from performance in Chrome
logs = driver.get_log("performance")
# Opens a writable JSON file and writes the logs in it
with open("network_log.json", "w", encoding="utf-8") as f:
f.write("[")
# Iterates every logs and parses it using JSON
for log in logs:
network_log = json.loads(log["message"])["message"]
# Checks if the current 'method' key has any
# Network related value.
if("Network.response" in network_log["method"]
or "Network.request" in network_log["method"]
or "Network.webSocket" in network_log["method"]):
# Writes the network log to a JSON file by
# converting the dictionary to a JSON string
# using json.dumps().
f.write(json.dumps(network_log)+",")
f.write("{}]")
# Read the JSON File and parse it using
# json.loads() to find the urls containing images.
json_file_path = "network_log.json"
with open(json_file_path, "r", encoding="utf-8") as f:
logs = json.loads(f.read())
# Iterate the logs
for log in logs:
# Except block will be accessed if any of the
# following keys are missing.
try:
# URL is present inside the following keys
url = log["params"]["request"]["url"]
if ("Network.responseReceived" in log["method"] and "requestId" in log["params"]):
print (log["params"]["requestId"])
# Checks if the extension is .png or .jpg
if url.find("https://portale.agenziaentrate.gov.it/portale-rest/rs/infouser")!=-1 or url.find( "https://portale.agenziaentrate.gov.it/portale-rest/rs/notifiche/certif")!=-1 :
#if url[len(url)-4:] != ".png" or url[len(url)-4:] != ".jpg":
print(url, end='\n\n')
print (log["params"]["requestId"])
try:
body = driver.execute_cdp_cmd('Network.getResponseBody', {'requestId': log["params"]["requestId"]})
#log['body'] = body
print (body)
with open("body.json", "a", encoding="utf-8") as f:
f.write("{ ")
f.write("urlresponse:[ ")
f.write(json.dumps(str(body)+","))
f.write("],")
f.write("{} }")
f.close()
#__________________________________________________
except exceptions.WebDriverException:
print('response.body is null')
#log = json.loads(log["message"])["message"]
if ("Network.responseReceived" in log["method"] and "requestId" in log["params"]):
for log in log["params"]["requestId"]:
#print(str(log))
try:
body = driver.execute_cdp_cmd('Network.getResponseBody', {'requestId': log["params"]["requestId"]})
#log['body'] = body
print (body)
with open("body.text", "w", encoding="utf-8") as f:
f.write(json.dumps(str(body)+","))
#__________________________________________________
except exceptions.WebDriverException:
print('response.body is null')
except Exception as e:
pass
driver.get("https://cassetto.agenziaentrate.gov.it/CassHomeWeb/home")
time.sleep(5)
driver.get("https://cassetto.agenziaentrate.gov.it/cassfisc-web")
time.sleep(5)
driver.get("https://cassetto.agenziaentrate.gov.it/cassfisc-web/CassettoFiscaleServlet?Ric=VERS")
time.sleep(5)
driver.get("https://cassetto.agenziaentrate.gov.it/cassfisc-web/CassettoFiscaleServlet?Ric=F24&Anno=2020")
#driver.get("https://cassetto.agenziaentrate.gov.it/cassfisc-web/CassettoFiscaleServlet?Ric=DetF24&Anno=2023&indice=0&dataDal=&dataAl=")
html_source = driver.page_source
# Close the browser (if needed)
# driver.quit()
# Parse the HTML using BeautifulSoup
soup = BeautifulSoup(html_source, 'html.parser')
# Find all the years within the specified section
section_years = soup.find('section', class_='section section-anni border border-primary')
years = []
if section_years:
# Find all the span elements with class 'chip-label' within the section
year_elements = section_years.find_all('span', class_='chip-label')
# Extract the text content (years) from the elements
years = [element.get_text(strip=True) for element in year_elements]
# Print the extracted years
print(years)
html_source = driver.page_source
# Close the browser (if needed)
# driver.quit()
# Parse the HTML using BeautifulSoup
soup = BeautifulSoup(html_source, 'html.parser')
# Find all the years within the specified section
section_years = soup.find('section', class_='section section-anni border border-primary')
years_data = []
if section_years:
# Trovare tutti gli elementi anchor con la classe 'chip-label' all'interno della sezione
year_elements = section_years.find_all('a', class_='btn btn-sm chip-btn')
for element in year_elements:
year_text = element.get_text(strip=True)
print (year_text)
# Saltare gli anni con "Documenti non presenti"
if "Documenti non presenti" not in year_text:
year_url = element['href']
years_data.append({'year': year_text, 'url': year_url})
# Iterare su gli anni e visitare il link specificato per ogni anno
for data in years_data:
year = data['year']
print (year)
url = data['url']#.replace("Ric=F24", "Ric=DetF24")
driver.get(url)
print(url)
# Aggiungi la tua logica qui per estrarre informazioni dalla pagina per ogni anno
# ...
# Trova tutti i righe (<tr>) all'interno del corpo della tabella (<tbody>)
rows = driver.find_elements(By.CSS_SELECTOR, "table.table-hover tbody tr")
# Andiamo a estrarre le informazioni da ogni riga
for row in rows:
# Ottieni tutti i dati delle colonne e mettiteli in una lista
data = [cell.text.strip() for cell in row.find_elements(By.TAG_NAME, "th")] + [cell.text.strip() for cell in row.find_elements(By.TAG_NAME, "td")]
# Trova tutti i link (se esistono) nella riga
links = [a.get_attribute('href') for a in row.find_elements(By.TAG_NAME, "a")]
# Stampa i dati a schermo
print(data + links)
numero=data[2]
print("Numero f24_________________ " + data[2])
# Attendere per evitare di sovraccaricare il server con le richieste
time.sleep(1)
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, "table-responsive")))
# Trova tutti i link di dettaglio nella tabella
dettaglio_links = driver.find_elements(By.XPATH, '//th[@headers="dettaglio"]/a')
indice=""
protocollo=""
# Percorre i link e salva le pagine corrispondenti
for link in dettaglio_links:
# Ottieni l'href del link
href = link.get_attribute('href')
text = link.text
print(text)
# Verifica se il testo contiene la parola "indice"
if "dettaglio del versamento" in text:
# Estrai il numero del versamento e l'indice
versamento = text.split('n.')[0].strip()
indice = str(text.split('n.')[1].split(' ')[0].strip())
# Stampa il numero del versamento e l'indice
print('Versamento:', versamento)
print('Indice:', indice)
# Ottieni il testo del protocollo corrispondente
protocollo = link.find_element(By.XPATH, './../../td[@headers="protocollo"]').text.replace('/', '_') + '_' + indice
# Costruisci il nome del file da usare per salvare la pagina HTML
print ("download pagina html")
file_name = f"{protocollo}.html"
file_path = os.path.join('C:/Users/Utente/Desktop/Cassetto fiscale', file_name)
if numero== 1:
# Apri il link in una nuova finestra/tab
driver.execute_script(f"window.open('{href}', '_blank');")
#driver.execute_script(f"window.open('{href}');")
driver.switch_to.window(driver.window_handles[0])
# Attendi il caricamento della pagina
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.TAG_NAME, "html")))
# Salva il contenuto della pagina
with open(file_path, 'w', encoding='utf-8') as f:
f.write(driver.page_source)
#fa il download degli f24 singoli
# Costruisci gli URL per i file PDF
url_stampap = href + '&stampa=P'
url_stampaq = href + '&stampa=Q'
# Salva i file PDF
for url_suffix, suffix in [(url_stampap, 'P'), (url_stampaq, 'Q')]:
pdf_file_name = f"{protocollo}_{suffix}.pdf"
# Vai all'URL e inizia il download del PDF
#driver.get(url_suffix)
cookies = driver.get_cookies()
session = requests.Session()
for cookie in cookies:
session.cookies.set(cookie['name'], cookie['value'])
with open(pdf_file_name, "wb") as f:
file_bytes = session.get(url_suffix).content
f.write(file_bytes)
driver.close()
driver.switch_to.window(window_handles[0])
driver.refresh()
time.sleep(5)
#controllo multi f24
else:
print("Più f24 insieme")
rows = driver.find_elements(By.CSS_SELECTOR, "table.table-hover tbody tr")
# Andiamo a estrarre le informazioni da ogni riga
for row in rows:
# Ottieni tutti i dati delle colonne e mettiteli in una lista
data = [cell.text.strip() for cell in row.find_elements(By.TAG_NAME, "th")] + [cell.text.strip() for cell in row.find_elements(By.TAG_NAME, "td")]
# Trova tutti i link (se esistono) nella riga
links = [a.get_attribute('href') for a in row.find_elements(By.TAG_NAME, "a")]
# Stampa i dati a schermo
print(data + links)
# Attendere per evitare di sovraccaricare il server con le richieste
time.sleep(1)
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.CLASS_NAME, "table-responsive")))
# Trova tutti i link di dettaglio nella tabella
dettaglio_links = driver.find_elements(By.XPATH, '//th[@headers="dettaglio"]/a')
# Percorre i link e salva le pagine corrispondenti
for link in dettaglio_links:
# Ottieni l'href del link
href = link.get_attribute('href')
text = link.text
if "dettaglio del versamento" in text:
# Estrai il numero del versamento e l'indice
versamento = text.split('n.')[0].strip()
indice = (text.split('n.')[1].split(' ')[0].strip())
# Ottieni il testo del protocollo corrispondente
prot_org=link.find_element(By.XPATH, './../../td[@headers="protocollo"]').text
protocollo = link.find_element(By.XPATH, './../../td[@headers="protocollo"]').text.replace('/', '_') + '_' + indice
# Costruisci il nome del file da usare per salvare la pagina HTML
print ("multi f24 ______________________________________")
print ("l'indice di riferimento è "+ indice)
print (protocollo)
file_name = f"{protocollo}.html"
file_path = os.path.join('C:/Users/Utente/Desktop/Cassetto fiscale', file_name)
# Apri il link in una nuova finestra/tab
print (href)
driver.execute_script(f"window.open('{href}', '_blak');")
window_handles = driver.window_handles
driver.switch_to.window(driver.window_handles[1])
# Seleziona la seconda scheda (indice 1) e chiudila
#if len(window_handles) > 1:
#driver.switch_to.window(window_handles[1])
#driver.switch_to.window(driver.window_handles[1])
# Attendi il caricamento della pagina
WebDriverWait(driver, 10).until(EC.presence_of_element_located((By.TAG_NAME, "html")))
# Salva il contenuto della pagina
with open(file_path, 'w', encoding='utf-8') as f:
f.write(driver.page_source)
# fa il download degli f24 multipli
# Costruisci gli URL per i file PDF
print (prot_org)
pattern = re.compile(r'(\d+)/(\d+)')
# Eseguiamo la ricerca con il pattern compilato
match = pattern.search(prot_org)
prot = match.group(1)
prog = match.group(2)
hrefm="https://cassetto.agenziaentrate.gov.it/cassfisc-web/CassettoFiscaleServlet?Ric=DetF24&Anno=&indice="+ indice + "&dataDal=&dataAl=&ProtTelem=" + prot + "&ProgrTelem=" + prog
url_stampap = hrefm + '&stampa=P'
url_stampaq = href + '&stampa=Q'
# Salva i file PDF
for url_suffix, suffix in [(url_stampap, 'P'), (url_stampaq, 'Q')]:
pdf_file_name = f"{protocollo}_{suffix}.pdf"
# Vai all'URL e inizia il download del PDF
#driver.get(url_suffix)
cookies = driver.get_cookies()
session = requests.Session()
for cookie in cookies:
session.cookies.set(cookie['name'], cookie['value'])
with open(pdf_file_name, "wb") as f:
file_bytes = session.get(url_suffix).content
f.write(file_bytes)
#driver.execute_script("window.history.go(-1)")
driver.close()
driver.switch_to.window(window_handles[0])
driver.refresh()
time.sleep(5)
# Chiudi il browser una volta completato il processo
# Chiudi la tab corrente e torna alla tab della tabella
#driver.close()
#driver.switch_to.window(driver.window_handles[0])
#driver.quit()
|
4c6ba8194ce3680479048b72596307b2
|
{
"intermediate": 0.3673543334007263,
"beginner": 0.42036911845207214,
"expert": 0.2122766077518463
}
|
40,995
|
if not self.ws:
await self.connect()
prompt_id = self.queue_prompt(prompt)['prompt_id']
outputs = []
async for out in self.ws:
try:
message = json.loads(out)
if message['type'] == 'execution_start':
currently_Executing_Prompt = message['data']['prompt_id']
if message['type'] == 'executing' and prompt_id == currently_Executing_Prompt:
data = message['data']
if data['node'] is None and data['prompt_id'] == prompt_id:
break
except ValueError as e:
print("Incompatible response from ComfyUI");
history = self.get_history(prompt_id)[prompt_id]
for node_id in history['outputs']:
node_output = history['outputs'][node_id]
for item in node_output:
match item:
case "text":
outputs[item].append(node_output['text'][0])
case "audio":
outputs[node_id] = self.get_audio(item)
case "music":
outputs[node_id] = self.get_music(item)
return outputs
# case "image":
# outputs[node_id] = self.get_image(item['filename'], item['subfolder'], item['type'])
# return outputs
case "images":
for image in node_output['images']:
image_data = self.get_image(image['filename'], image['subfolder'], image['type'])
if 'final_output' in image['filename']:
pil_image = Image.open(BytesIO(image_data))
outputs.append(pil_image)
return outputs
case _:
continue.
|
7fec1f5112fdb30fe8c707ca291fc84f
|
{
"intermediate": 0.17365600168704987,
"beginner": 0.7924922108650208,
"expert": 0.033851783722639084
}
|
40,996
|
implement main
open core
domains
gender = female(); man().
class facts
person : (string Name, gender Gender).
parent : (string Person, string Parent).
class predicates
father : (string Person, string Father) nondeterm anyflow.
clauses
father(Person, Father) :-
parent(Person, Father),
person(Father, man()).
class predicates
grand_nephew : (string Person, string GrandFather) nondeterm (o,o).
clauses
grand_nephew(Person, GrandFather) :-
parent(Person, Parent),
father(Parent, GrandFather).
class predicates
cusen : (string Person, string Cusen) nondeterm (i,o).
clauses
cusen(Person, Cusen) :-
parent(Person, Cusen).
cusen(Person, Cusen) :-
parent(Person, Parent),
cusen(Parent, Cusen).
clauses
person("Jessica",female()).
person("Mark",man()).
person("Judith",female()).
person("Bill",man()).
person("John",man()).
person("Pam",female()).
parent("John","Kim").
parent("Bill","John").
parent("Pam","Bill").
parent("Jessica","Den").
parent("Mark","Jessica").
parent("Pam","Mark").
clauses
run():-
console::init(),
stdIO:: write("\nРодитель\n"),
father(X, Y),
stdIO::writef("% - %\n", X, Y),
fail.
run():-
stdIO:: write("\nВнук\n"),
grand_nephew(X, Y),
stdIO:: writef("% - %\n", Y, X),
fail.
run():-
stdIO:: write("\nКузен\n"),
X = "Kim",
cusen(X, Y),
stdIO:: writef("% - %\n", Y, X),
fail.
run():-
programControl:: sleep(9000),
succeed().
end implement main
goal
mainExe::run(main::run).
Как сделать чтобы кузен был
|
5498b4fe1e32f1f7d61f9298d431075c
|
{
"intermediate": 0.3422203063964844,
"beginner": 0.35787147283554077,
"expert": 0.29990822076797485
}
|
40,997
|
how to make repository name visible in terminal
|
5db573ee735c0a1e1cc69dd22a26ff90
|
{
"intermediate": 0.35589247941970825,
"beginner": 0.24024362862110138,
"expert": 0.40386390686035156
}
|
40,998
|
how to write a simple rust lexer lexing python indentation
|
47dbb8773d27d997fa37acd08af8f3cc
|
{
"intermediate": 0.30344659090042114,
"beginner": 0.4572191834449768,
"expert": 0.23933421075344086
}
|
40,999
|
can u code?
|
97a13e56da8966f3f752990f48bc930b
|
{
"intermediate": 0.27656036615371704,
"beginner": 0.2844225764274597,
"expert": 0.43901705741882324
}
|
41,000
|
I am making a C++ SDL based game engine, and currently I am wrapping every SDL functionality. Help me finish my Font class, this is my current implementation:
public:
Font(const std::string& filePath);
virtual ~Font();
Font(const Font&) = delete;
Font& operator=(const Font&) = delete;
Font(Font&& other) noexcept;
Font& operator=(Font&& other) noexcept;
TTF_Font* GetFont(int size);
std::string GetFontPath() const;
private:
void ClearFonts();
std::string filePath;
std::unordered_map<int, TTF_Font*> fontSizes;
I want to make TTF_MeasureUTF8, but this has several functionalities on it, how can I approach this?
|
d5cf5ac3b6a19758c9c06094c9712b12
|
{
"intermediate": 0.5072010159492493,
"beginner": 0.3132478594779968,
"expert": 0.1795511692762375
}
|
41,001
|
the first useEffect seems to contradict with my globeComponent: import { motion } from "framer-motion";
import React, { useState, useEffect } from "react";
import { useRouter } from "next/router";
import ParticlesContainer from "./ParticlesContainer";
import { useGlobe } from '../components/globe/globeContext';
const Transition = () => {
const [isMobileView, setIsMobileView] = useState(false);
const [isLoaded, setIsLoaded] = useState(false);
const router = useRouter();
const { globeReady } = useGlobe();
let transitionDelay;
const baseDelay = router.pathname === '/' ? 0.2 : 0.2;
const transitionVariants = {
initial: {
x: "100%",
width: "100%",
},
animate: {
x: "0%",
width: "0%",
},
exit: {
x: ["0%", "100%"],
width: ["0%", "100%"],
},
};
useEffect(() => {
function handleResize() {
// Safeguarding window access
if (typeof window !== 'undefined') {
if (window.innerWidth < 768) {
setIsMobileView(true);
} else {
setIsMobileView(false);
}
}
}
// Since we're using window, we safeguard the entire effect
if (typeof window !== 'undefined') {
handleResize();
window.addEventListener('resize', handleResize);
}
// Cleanup
return () => {
if (typeof window !== 'undefined') {
window.removeEventListener('resize', handleResize);
}
};
}, []);
useEffect(() => {
// If we're on the homepage and globe is ready, set loaded to true
if (router.pathname === '/' && globeReady) {
setIsLoaded(true);
} else if(router.pathname!== '/') {
setIsLoaded(true);
}
}, [globeReady, router.pathname]);
if (router.pathname === '/' && !globeReady) {
transitionDelay = globeReady ? baseDelay : Infinity;
} else if (router.pathname !== '/' && !globeReady) {
transitionDelay = baseDelay;
}
return (
<>
{!isLoaded && (
<motion.div
role="status"
className="fixed top-0 bottom-0 right-full w-screen h-screen z-30 bg-gradient-to-tl from-violet-900 to-black-900"
variants={transitionVariants}
initial="initial"
animate="animate"
exit="exit"
transition={{ delay: transitionDelay, duration: 0.6, ease: "easeInOut" }}
aria-hidden="true"
>
<motion.img
src="/logo.svg"
alt="Descriptive text"
style={{
position: 'absolute',
top: '50%',
left: '50%',
transform: 'translate(-50%, -50%)',
width: '500px',
height: '500px'
}}
/>
<motion.img
src="/hand.gif"
alt="Descriptive text"
style={{
position: 'absolute',
top: '70%',
left: '50%',
transform: 'translate(-50%, -50%)',
width: '500px',
height: '500px'
}}
/>
<motion.div
className="pt-5 text-center sm:px-6 lg:px-8 font-bold"
style={{
position: 'absolute',
textAlign: 'center',
top: '100%',
left: '50%',
transform: 'translate(-50%, -50%)',
width: '500px',
height: '500px',
fontSize: '1.2rem',
}}>
loading ...
</motion.div>
{!isMobileView && (
<ParticlesContainer />
)}
</motion.div>
)}
<motion.div
role="status"
className="fixed top-0 bottom-0 right-full w-screen h-screen z-20 bg-gradient-to-tl from-violet-900 to-zinc-800"
variants={transitionVariants}
initial="initial"
animate="animate"
exit="exit"
transition={{ delay: 0.4, duration: 0.6, ease: "easeInOut" }}
aria-hidden
/>
<motion.div
role="status"
className="fixed top-0 bottom-0 right-full w-screen h-screen z-10 bg-gradient-to-tl from-violet-900 to-blue-600"
variants={transitionVariants}
initial="initial"
animate="animate"
exit="exit"
transition={{ delay: 0.6, duration: 0.6, ease: "easeInOut" }}
aria-hidden
/>
</>
);
};
export default Transition;
GlobeComponent: import React, { useEffect, useState, useRef } from 'react';
import Globe from 'react-globe.gl';
import * as THREE from 'three';
import { useRouter } from 'next/router';
import { useGlobe } from '../components/globe/globeContext';
import countries from './files/globe-data-min.json';
import travelHistory from './files/my-flights.json';
import airportHistory from './files/my-airports.json';
const GlobeComponent = () => {
const [arcsData, setArcsData] = useState([]);
const { setGlobeReady } = useGlobe();
const router = useRouter();
const globeRef = useRef();
const [dimensions, setDimensions] = useState({ width: 500, height: 500 });
useEffect(() => {
// Function to update the dimensions state based on the window size
const handleResize = () => {
if (window.matchMedia("(min-width: 960px)").matches) {
// If the screen width is larger than or equal to ‘lg’ breakpoint, set to desktop size
setDimensions({ width: 1000, height: 1000 });
} else {
// If smaller, set to mobile size
setDimensions({ width: 450, height: 450 });
}
};
// Set the initial globe size on component mount
handleResize();
// Add event listener for window resize
window.addEventListener('resize', handleResize);
// Remove event listener on cleanup
return () => window.removeEventListener('resize', handleResize);
}, []);
useEffect(() => {
// Check if the globe instance is loaded
if (globeRef && globeRef.current) {
const kenyaCoordinates = {
lat: 2.286389, // latitude for Nairobi, Kenya
lng: 18.82023 // longitude for Nairobi, Kenya
};
globeRef.current.pointOfView(kenyaCoordinates, 3000); // Animate the camera movement over 3000ms (3 seconds)
}
}, []);
useEffect(() => {
const handleRouteChange = () => {
// You could update state here to ensure fresh data is fetched when a route changes
setArcsData(travelHistory.flights);
};
router.events.on('routeChangeComplete', handleRouteChange);
// Fetch the initial data
setArcsData(travelHistory.flights);
return () => {
router.events.off('routeChangeComplete', handleRouteChange);
};
}, [router.events]);
return (
<Globe
ref={globeRef}
hexPolygonsData = { countries.features }
hexPolygonResolution = { 3}
hexPolygonMargin = {0.75}
hexPolygonAltitude = {0.01}
hexPolygonUseDots = {false}
enablePointerInteraction ={false}
onGlobeReady={() => setGlobeReady(true)}
showAtmosphere = { true}
atmosphereColor="#ffffff"
atmosphereAltitude = { 0.12}
backgroundColor="rgba(0, 0, 0, 0)"
globeImageUrl="//unpkg.com/three-globe/example/img/earth-dark.jpg"
hexPolygonColor = {(e) => {
return ["KEN", "CHN", "FRA", "ZAF", "JPN", "USA", "AUS", "CAN"].includes(e.properties.ISO_A3)
? "#9cff00"
: "rgba(255, 255, 255, 0.5)";
}}
width={dimensions.width}
height={dimensions.height}
arcsData={arcsData}
arcColor = {(e) => {
return e.status ? "#9cff00" : "#f13024";
}}
arcAltitude = {(e) => {
return e.arcAlt;
}}
arcStroke = {(e) => {
return e.status ? 0.5 : 0.3;
}}
arcDashLength = { 0.9}
arcDashGap = { 4}
arcDashAnimateTime = { 1000}
arcsTransitionDuration = { 1000}
arcDashInitialGap = {(e) => e.order * 1}
labelsData = { airportHistory.airports }
labelColor={() => "#ffffff"}
labelDotOrientation = {(e) => {
return e.text === "NGA" ? "top" : "right";
}}
labelDotRadius = { 0.35}
labelSize = {1.1}
labelText = {"city" }
labelResolution = { 6}
labelAltitude = { 0.07}
pointsData = { airportHistory.airports }
pointColor={() => "#ffffff"}
pointsMerge = { true}
pointAltitude = { 0.07}
pointRadius = { 0.10}
/>
);
};
export default GlobeComponent;
|
a7b956b0fbde0d72f6d90267f5187a0f
|
{
"intermediate": 0.36504095792770386,
"beginner": 0.4701841473579407,
"expert": 0.16477489471435547
}
|
41,002
|
-- Сторона клиента
-- Создаем переменные для хранения координат маркеров и их ID
local marker1 = {x = 1262.9248046875, y = -2025.0537109375, z = 59.336345672607, id = "infoaboutserver"}
local marker2 = {x = 1271.0048828125, y = -2025.578125, z = 59.095985412598, id = "infoaboutwork"}
-- Создаем маркеры на карте с помощью функции createMarker
local marker1Element = createMarker(marker1.x, marker1.y, marker1.z, "cylinder", 1.5, 255, 0, 0, 150)
local marker2Element = createMarker(marker2.x, marker2.y, marker2.z, "cylinder", 1.5, 255, 0, 0, 150)
-- Создаем переменные для хранения текста, который будет отображаться в окне DxGUI
local marker1Text = "Добро пожаловать на наш увлекательный сервер жанра выживания в зомби-апокалипсисе, сейчас мы начнём небольшое обучение тому, как играть здесь и выживать, что самое главное"
local marker2Text = "Это заброшенная деревня, здесь вы можете найти множество интересных и важных артефактов, скрытых персональных заданий и получить большой опыт. Но будьте осторожны, опасность подстерегает вас за каждым углом"
-- Создаем переменную для хранения текущего маркера, с которым взаимодействует игрок
local currentMarker = nil
-- Создаем функцию для отрисовки окна DxGUI с информацией о маркере
local function drawMarkerInfo()
-- Проверяем, что текущий маркер существует
if currentMarker then
-- Проверяем, что локальный игрок существует
if isElement(localPlayer) then
-- Получаем размеры экрана
local screenWidth, screenHeight = guiGetScreenSize()
-- Вычисляем размеры и координаты окна DxGUI
local windowWidth = screenWidth * 0.5 -- 50% от ширины экрана
local windowHeight = screenHeight * 0.3 -- 30% от высоты экрана
local windowX = (screenWidth - windowWidth) / 2 -- центрируем по горизонтали
local windowY = (screenHeight - windowHeight) / 2 -- центрируем по вертикали
-- Вычисляем размеры и координаты текста
local textWidth = windowWidth * 0.9 -- 90% от ширины окна
local textHeight = windowHeight * 0.7 -- 70% от высоты окна
local textX = windowX + (windowWidth - textWidth) / 2 -- центрируем по горизонтали
local textY = windowY + (windowHeight - textHeight) / 2 -- центрируем по вертикали
-- Вычисляем размеры и координаты кнопки
local buttonWidth = windowWidth * 0.2 -- 20% от ширины окна
local buttonHeight = windowHeight * 0.1 -- 10% от высоты окна
local buttonX = windowX + (windowWidth - buttonWidth) / 2 -- центрируем по горизонтали
local buttonY = windowY + windowHeight * 0.8 -- располагаем внизу окна
-- Рисуем окно DxGUI с помощью функции dxDrawRectangle
dxDrawRectangle(windowX, windowY, windowWidth, windowHeight, tocolor(0, 0, 0, 200)) -- черный цвет с прозрачностью 200
-- Рисуем текст с помощью функции dxDrawText
dxDrawText(currentMarker.text, textX, textY, textX + textWidth, textY + textHeight, tocolor(255, 255, 255, 255), 1, "default", "center", "center", true, true) -- белый цвет, размер 1, шрифт по умолчанию, выравнивание по центру, перенос по словам, клиппинг
-- Рисуем кнопку с помощью функции dxDrawRectangle
dxDrawRectangle(buttonX, buttonY, buttonWidth, buttonHeight, tocolor(255, 0, 0, 200)) -- красный цвет с прозрачностью 200
-- Рисуем текст на кнопке с помощью функции dxDrawText
dxDrawText("ПОНЯЛ", buttonX, buttonY, buttonX + buttonWidth, buttonY + buttonHeight, tocolor(255, 255, 255, 255), 1, "default", "center", "center") -- белый цвет, размер 1, шрифт по умолчанию, выравнивание по центру
-- Добавляем координаты кнопки к атрибутам текущего маркера
currentMarker.buttonX = buttonX
currentMarker.buttonY = buttonY
currentMarker.buttonWidth = buttonWidth
currentMarker.buttonHeight = buttonHeight
end
end
end
-- Создаем функцию для обработки события onClientMarkerHit, когда игрок входит в маркер
local function onMarkerHit(hitElement, matchingDimension)
-- Проверяем, что элемент, вошедший в маркер, это игрок и он находится в том же измерении, что и маркер
if hitElement == localPlayer and matchingDimension then
-- Проверяем, что игрок не находится в транспортном средстве
if not isPedInVehicle(localPlayer) then
-- Проверяем, какой маркер был активирован
if source == marker1Element then
-- Устанавливаем текущий маркер равным первому маркеру
currentMarker = marker1
elseif source == marker2Element then
-- Устанавливаем текущий маркер равным второму маркеру
currentMarker = marker2
end
-- Добавляем текст к текущему маркеру в зависимости от его ID
if currentMarker.id == "infoaboutserver" then
currentMarker.text = marker1Text
elseif currentMarker.id == "infoaboutwork" then
currentMarker.text = marker2Text
end
-- Добавляем текст к текущему маркеру в зависимости от его ID
if currentMarker.id == "infoaboutserver" then
currentMarker.text = marker1Text
elseif currentMarker.id == "infoaboutwork" then
currentMarker.text = marker2Text
end
-- Добавляем обработчик события onClientRender, чтобы отрисовывать окно DxGUI
addEventHandler("onClientRender", root, drawMarkerInfo)
-- Включаем курсор мыши
showCursor(true)
end
end
end
-- Создаем функцию для обработки события onClientMarkerLeave, когда игрок выходит из маркера
local function onClick(button, state, absoluteX, absoluteY)
-- Проверяем, что кнопка мыши это левая и состояние это нажатие
if button == "left" and state == "down" then
-- Проверяем, что текущий маркер существует
if currentMarker then
-- Получаем координаты кнопки из атрибутов текущего маркера
local buttonX = currentMarker.buttonX
local buttonY = currentMarker.buttonY
local buttonWidth = currentMarker.buttonWidth
local buttonHeight = currentMarker.buttonHeight
-- Проверяем, что координаты клика мыши попадают в область кнопки
if absoluteX >= buttonX and absoluteX <= buttonX + buttonWidth and absoluteY >= buttonY and absoluteY <= buttonY + buttonHeight then
outputChatBox("Вы нажали на кнопку", localPlayer, 255, 255, 255)
-- Удаляем обработчик события onClientRender, чтобы перестать отрисовывать окно DxGUI
removeEventHandler("onClientRender", root, drawMarkerInfo)
-- Выключаем курсор мыши
showCursor(false)
-- Обнуляем текущий маркер
currentMarker = nil
end
end
end
end
addEventHandler("onClientClick", root, onClick) -- Добавляем обработчик события onClientClick
-- Создаем функцию для обработки события onClientClick, когда игрок нажимает на кнопку мыши
local function onClick(button, state, absoluteX, absoluteY, buttonX, buttonY, buttonWidth, buttonHeight)
-- Проверяем, что кнопка мыши это левая и состояние это нажатие
if button == "left" and state == "down" then
-- Проверяем, что текущий маркер существует
if currentMarker then
-- Проверяем, что координаты клика мыши попадают в область кнопки, используя координаты окна DxGUI, переданные через аргументы
if absoluteX >= buttonX and absoluteX <= buttonX + buttonWidth and absoluteY >= buttonY and absoluteY <= buttonY + buttonHeight then
outputChatBox("Вы нажали на кнопку", localPlayer, 255, 255, 255)
-- Удаляем обработчик события onClientRender, чтобы перестать отрисовывать окно DxGUI
removeEventHandler("onClientRender", root, drawMarkerInfo)
-- Удаляем обработчик события onClientClick, чтобы перестать обрабатывать нажатие на кнопку
removeEventHandler("onClientClick", root, onClick)
-- Выключаем курсор мыши
showCursor(false)
-- Обнуляем текущий маркер
currentMarker = nil
end
end
end
end
-- Добавляем обработчики событий для маркеров
addEventHandler("onClientMarkerHit", marker1Element, onMarkerHit)
addEventHandler("onClientMarkerHit", marker2Element, onMarkerHit)
addEventHandler("onClientMarkerLeave", marker1Element, onMarkerLeave)
addEventHandler("onClientMarkerLeave", marker2Element, onMarkerLeave)
Как изменить внешний вид маркеров в этом коде для MTA SAN ANDREAS, мне нужно добавить на них свою картинку, формата png
|
83af13c57ee31913af3e54a62e9664b3
|
{
"intermediate": 0.3761155605316162,
"beginner": 0.4012448489665985,
"expert": 0.22263960540294647
}
|
41,003
|
-- Сторона клиента
-- Создаем переменные для хранения координат маркеров и их ID
local marker1 = {x = 1262.9248046875, y = -2025.0537109375, z = 59.336345672607, id = "infoaboutserver"}
local marker2 = {x = 1271.0048828125, y = -2025.578125, z = 59.095985412598, id = "infoaboutwork"}
-- Создаем маркеры на карте с помощью функции createMarker
local marker1Element = createMarker(marker1.x, marker1.y, marker1.z, "cylinder", 1.5, 255, 0, 0, 150)
local marker2Element = createMarker(marker2.x, marker2.y, marker2.z, "cylinder", 1.5, 255, 0, 0, 150)
-- Создаем переменные для хранения текста, который будет отображаться в окне DxGUI
local marker1Text = "Добро пожаловать на наш увлекательный сервер жанра выживания в зомби-апокалипсисе, сейчас мы начнём небольшое обучение тому, как играть здесь и выживать, что самое главное"
local marker2Text = "Это заброшенная деревня, здесь вы можете найти множество интересных и важных артефактов, скрытых персональных заданий и получить большой опыт. Но будьте осторожны, опасность подстерегает вас за каждым углом"
-- Создаем переменную для хранения текущего маркера, с которым взаимодействует игрок
local currentMarker = nil
-- Создаем функцию для отрисовки окна DxGUI с информацией о маркере
local function drawMarkerInfo()
-- Проверяем, что текущий маркер существует
if currentMarker then
-- Проверяем, что локальный игрок существует
if isElement(localPlayer) then
-- Получаем размеры экрана
local screenWidth, screenHeight = guiGetScreenSize()
-- Вычисляем размеры и координаты окна DxGUI
local windowWidth = screenWidth * 0.5 -- 50% от ширины экрана
local windowHeight = screenHeight * 0.3 -- 30% от высоты экрана
local windowX = (screenWidth - windowWidth) / 2 -- центрируем по горизонтали
local windowY = (screenHeight - windowHeight) / 2 -- центрируем по вертикали
-- Вычисляем размеры и координаты текста
local textWidth = windowWidth * 0.9 -- 90% от ширины окна
local textHeight = windowHeight * 0.7 -- 70% от высоты окна
local textX = windowX + (windowWidth - textWidth) / 2 -- центрируем по горизонтали
local textY = windowY + (windowHeight - textHeight) / 2 -- центрируем по вертикали
-- Вычисляем размеры и координаты кнопки
local buttonWidth = windowWidth * 0.2 -- 20% от ширины окна
local buttonHeight = windowHeight * 0.1 -- 10% от высоты окна
local buttonX = windowX + (windowWidth - buttonWidth) / 2 -- центрируем по горизонтали
local buttonY = windowY + windowHeight * 0.8 -- располагаем внизу окна
-- Рисуем окно DxGUI с помощью функции dxDrawRectangle
dxDrawRectangle(windowX, windowY, windowWidth, windowHeight, tocolor(0, 0, 0, 200)) -- черный цвет с прозрачностью 200
-- Рисуем текст с помощью функции dxDrawText
dxDrawText(currentMarker.text, textX, textY, textX + textWidth, textY + textHeight, tocolor(255, 255, 255, 255), 1, "default", "center", "center", true, true) -- белый цвет, размер 1, шрифт по умолчанию, выравнивание по центру, перенос по словам, клиппинг
-- Рисуем кнопку с помощью функции dxDrawRectangle
dxDrawRectangle(buttonX, buttonY, buttonWidth, buttonHeight, tocolor(255, 0, 0, 200)) -- красный цвет с прозрачностью 200
-- Рисуем текст на кнопке с помощью функции dxDrawText
dxDrawText("ПОНЯЛ", buttonX, buttonY, buttonX + buttonWidth, buttonY + buttonHeight, tocolor(255, 255, 255, 255), 1, "default", "center", "center") -- белый цвет, размер 1, шрифт по умолчанию, выравнивание по центру
-- Добавляем координаты кнопки к атрибутам текущего маркера
currentMarker.buttonX = buttonX
currentMarker.buttonY = buttonY
currentMarker.buttonWidth = buttonWidth
currentMarker.buttonHeight = buttonHeight
end
end
end
-- Создаем функцию для обработки события onClientMarkerHit, когда игрок входит в маркер
local function onMarkerHit(hitElement, matchingDimension)
-- Проверяем, что элемент, вошедший в маркер, это игрок и он находится в том же измерении, что и маркер
if hitElement == localPlayer and matchingDimension then
-- Проверяем, что игрок не находится в транспортном средстве
if not isPedInVehicle(localPlayer) then
-- Проверяем, какой маркер был активирован
if source == marker1Element then
-- Устанавливаем текущий маркер равным первому маркеру
currentMarker = marker1
elseif source == marker2Element then
-- Устанавливаем текущий маркер равным второму маркеру
currentMarker = marker2
end
-- Добавляем текст к текущему маркеру в зависимости от его ID
if currentMarker.id == "infoaboutserver" then
currentMarker.text = marker1Text
elseif currentMarker.id == "infoaboutwork" then
currentMarker.text = marker2Text
end
-- Добавляем обработчик события onClientRender, чтобы отрисовывать окно DxGUI
addEventHandler("onClientRender", root, drawMarkerInfo)
-- Включаем курсор мыши
showCursor(true)
end
end
end
local function onMarkerLeave(leftElement, matchingDimension)
-- Проверяем, что элемент, покинувший маркер, это игрок и он находится в том же измерении, что и маркер
if leftElement == localPlayer and matchingDimension then
-- Удаляем обработчик события onClientRender
removeEventHandler("onClientRender", root, drawMarkerInfo)
-- Выключаем курсор мыши
showCursor(false)
-- Обнуляем текущий маркер
currentMarker = nil
end
end
-- Создаем функцию для обработки события onClientMarkerLeave, когда игрок выходит из маркера
local function onClick(button, state, absoluteX, absoluteY)
-- Проверяем, что кнопка мыши это левая и состояние это нажатие
if button == "left" and state == "down" then
-- Проверяем, что текущий маркер существует
if currentMarker then
-- Получаем координаты кнопки из атрибутов текущего маркера
local buttonX = currentMarker.buttonX
local buttonY = currentMarker.buttonY
local buttonWidth = currentMarker.buttonWidth
local buttonHeight = currentMarker.buttonHeight
-- Проверяем, что координаты клика мыши попадают в область кнопки
if absoluteX >= buttonX and absoluteX <= buttonX + buttonWidth and absoluteY >= buttonY and absoluteY <= buttonY + buttonHeight then
outputChatBox("Вы нажали на кнопку", localPlayer, 255, 255, 255)
-- Удаляем обработчик события onClientRender, чтобы перестать отрисовывать окно DxGUI
removeEventHandler("onClientRender", root, drawMarkerInfo)
-- Выключаем курсор мыши
showCursor(false)
-- Обнуляем текущий маркер
currentMarker = nil
end
end
end
end
addEventHandler("onClientClick", root, onClick) -- Добавляем обработчик события onClientClick
-- Добавляем обработчики событий для маркеров
addEventHandler("onClientMarkerHit", marker1Element, onMarkerHit)
addEventHandler("onClientMarkerHit", marker2Element, onMarkerHit)
addEventHandler("onClientMarkerLeave", marker1Element, onMarkerLeave)
addEventHandler("onClientMarkerLeave", marker2Element, onMarkerLeave)
Найди баги и ошибки, логические упущения в этом скрипте и скажи как исправить
|
e365d3af143ada789baec07821d297e4
|
{
"intermediate": 0.3761155605316162,
"beginner": 0.4012448489665985,
"expert": 0.22263960540294647
}
|
41,004
|
We're going to make a very advanced forge 1.8.9 mod that's in kotlin.
The mod should be able to load modules that are coded in JS using nashorn. The modules should be attatched to a NodeJS interface so I can interface with the JS modules and make it it so it prints into the nodejs terminal.
|
710da7f53436ecb4cad7364950a9a767
|
{
"intermediate": 0.4640570878982544,
"beginner": 0.27964073419570923,
"expert": 0.25630223751068115
}
|
41,005
|
give me the recommended margins for my XeTeX resume
|
c80e275d40f49535c310d6ac9af96cc9
|
{
"intermediate": 0.35479187965393066,
"beginner": 0.335994690656662,
"expert": 0.30921342968940735
}
|
41,006
|
Give me an overview of what to put inside every page of my presentation about My city "Marrakech", if you can code that, you would make my life a lot easier
|
c477dc98268a2329e0d4b7015d3b92f6
|
{
"intermediate": 0.36164864897727966,
"beginner": 0.34048372507095337,
"expert": 0.2978675961494446
}
|
41,007
|
Format this properly for markdown mode reddit post
I’m casting my message into the vast digital sea in hopes of finding someone who understands the language of the heart, someone who seeks a bond that’s as authentic as it is profound.
What you should know about me is, a love of learning that’s as insatiable as my quest for a soulmate. I seek someone who isn’t afraid to dream with me, a partner in crafting a family, our own piece of eternity. I envision playing games in the backyard, story time voices voiced in characters, and guiding little hearts to find their place in the world with kindness and courage.
I’m a romantic soul, living amidst a backdrop of art and history that could rival any storyteller’s grandest design. The depth of my love seeks a match in genuine connection, one that grows from seeds of respect, joy and an appreciation for the unique quirks that define us. To me, every ancient structure around is a lesson that true love should evolve continually, far from the static nature of relics.
About me: Envision me, if you will, a tall, slim figure with a beard that’s just the right mix of neat and rugged, deep brown and warm eyes. Fair warning: If you're shallow on looks, you may skip on me. I personally find some beauty in everyone, there's something unique and cherishable in all of us humans.
Here’s what I can promise you:
\\- Candlelit dinners, not just on special occasions, but “just because” days.
\\- Genuine conversations that dive deep into our beliefs, hopes, and even fears.
\\- Exploration, not just of the map, but of the intellectual and emotional landscapes that define us.
\\- A strength that’s not just physical, but emotional, ready to support you in every endeavor.
\\- Romance that’s not just in the grand gestures, but in the small, everyday acts of caring.
Ideal Match:
I’m searching for someone who equally enjoys the quiet, introspective life, where a Saturday night might be spent in the glow of a shared screen, engaging in co-op gameplay, or dissecting the plot of a TV series with as much intensity as its creators. Your idea of an adventure might be exploring new realms in a fantasy novel or travelling around the world.
Perhaps you, like me, find comfort in routines and the small joys they bring. You take your coffee with a side of contemplation and your conversations with a depth that reflects your inner thoughts. You don’t shy away from commitment; instead, you’re drawn to its stability and the closeness it fosters.
My hope is to meet someone who understands that my clinginess is a sign of my devotion and cherishes the care and protection I naturally extend to those I love. You value emotional intimacy and the strength that comes from mutual support and understanding. Instead of large social gatherings, you treasure intimate moments that allow vulnerability and true connection.
You might find yourself a bit clingy, too, desiring a partner who’s always ready to listen and engage wholeheartedly. You find beauty in shared silence and believe that a look can convey more than words. Most importantly, you’re looking for a lasting connection, a partnership that evolves and grows stronger with each challenge and triumph shared.
If you’re out there, and this resonates with the song your heart has been quietly humming, then let’s begin this symphony together. Send a message, and let’s find out if our solos are meant to harmonize.
|
3f309a6e3b0c3cf8d0f79607613f47a7
|
{
"intermediate": 0.2311764657497406,
"beginner": 0.5145533084869385,
"expert": 0.2542702555656433
}
|
41,008
|
In Python playwright how do I save the full html of a locator (including all children)
|
c20346a6ec2da9897b37d3619946743b
|
{
"intermediate": 0.5553153157234192,
"beginner": 0.17801141738891602,
"expert": 0.2666732966899872
}
|
41,010
|
how to do from hex of this 1f397b272722252c140a253f7a140f78293e0c0c2e19143f39222820311419142d3e256a and XOR with 4B hex in Python ?
|
6dae819326bc2933d36c5e21360e7479
|
{
"intermediate": 0.4769166111946106,
"beginner": 0.17160171270370483,
"expert": 0.35148167610168457
}
|
41,011
|
how to do from hex of this 1f397b272722252c140a253f7a140f78293e0c0c2e19143f39222820311419142d3e256a and XOR with 4B hex in Python ? result shoud be ascii text. not hex
|
b66f113f565d52bb96165c0208a78a41
|
{
"intermediate": 0.4866630434989929,
"beginner": 0.19477078318595886,
"expert": 0.3185661733150482
}
|
41,012
|
I think I need a way to render this polyline to the screen. How do I do that from here
//
// MapViewController.swift
// TestApp
//
// Created by Erich Boschert on 3/1/24.
//
import UIKit
import MapKit
class MapViewController: UIViewController {
@IBAction func onBack(_ sender: Any) {
dismiss(animated: true)
}
private let map: MKMapView = {
return MKMapView()
}()
override func viewDidLoad() {
print("hi")
super.viewDidLoad()
view.insertSubview(map, at: 0)
// Add pins for markers
let markers = Marker.getMarkers()
if(markers.count == 1) {
let marker = markers[0]
let annotation = MKPointAnnotation()
annotation.coordinate = CLLocationCoordinate2D(latitude: marker.latitude, longitude: marker.longitude)
map.addAnnotation(annotation);
} else {
var lineCoords = [CLLocationCoordinate2D]()
for marker in markers {
let hi = Double.random(in: (-90)..<90)
let hi2 = Double.random(in: (-180)..<180)
lineCoords.append(CLLocationCoordinate2D(latitude: hi, longitude: hi2))
}
let line = MKPolyline(coordinates: lineCoords, count: lineCoords.count)
map.addOverlay(line);
}
}
override func viewDidLayoutSubviews() {
super.viewDidLayoutSubviews()
map.frame = view.bounds;
}
}
|
e25bbd19386fafdb44ee5000cee35725
|
{
"intermediate": 0.5706433057785034,
"beginner": 0.23134049773216248,
"expert": 0.1980161815881729
}
|
41,013
|
import time
from queue import Queue
from threading import Thread
from tkinter import ttk, Tk, StringVar, W, EW, SUNKEN
from decimal import Decimal, getcontext, Overflow
import ast
import operator as op
# Safe evaluation setup
allowed_operators = {
ast.Add: op.add, ast.Sub: op.sub, ast.Mult: op.mul,
ast.Div: op.truediv, ast.Pow: op.pow, ast.BitXor: op.xor,
ast.USub: op.neg
}
def safe_eval(expr):
"""
Safely evaluate an arithmetic expression using AST.
"""
def eval_(node):
if isinstance(node, ast.Num): # <number>
return Decimal(node.n)
elif isinstance(node, ast.BinOp): # <left> <operator> <right>
return allowed_operators[type(node.op)](eval_(node.left), eval_(node.right))
elif isinstance(node, ast.UnaryOp): # <operator> <operand> e.g., -1
return allowed_operators[type(node.op)](eval_(node.operand))
else:
raise TypeError(node)
return eval_(ast.parse(expr, mode='eval').body)
def format_scientific(number: Decimal, digit_limit: int) -> str:
"""Format the number in scientific notation if it exceeds digit_limit."""
try:
if len(str(number).replace('.', '').replace('-', '')) > digit_limit:
return "{:.{}e}".format(number, digit_limit - 1)
return str(number)
except Overflow:
# Handle numbers that are too large
return number.to_eng_string()
class ExpressionParser:
"""Class to parse and evaluate mathematical expressions safely."""
def __init__(self) -> None:
self.precision = 10000 # Set high precision for large numbers
def evaluate(self, expression: str, digit_limit: int) -> str:
"""Parse an expression and evaluate the result safely."""
getcontext().prec = self.precision
try:
result = safe_eval(expression)
return format_scientific(result, digit_limit)
except Exception as e:
raise ValueError(f"Invalid expression: {e}") from e
class GuiUpdater:
"""Class to update GUI with results and status."""
def __init__(self, root: Tk, result_var: StringVar, status_var: StringVar, entry_var: StringVar, digit_limit_var: StringVar) -> None:
self.root = root
self.result_var = result_var
self.status_var = status_var
self.entry_var = entry_var
self.digit_limit_var = digit_limit_var
self.queue: "Queue[Tuple[str, float]]" = Queue()
self.start()
def start(self) -> None:
"""Start background thread to process result queue."""
self.worker = Thread(target=self.process_queue, daemon=True)
self.worker.start()
def process_queue(self) -> None:
"""Get results from queue and update GUI."""
while True:
result, duration = self.queue.get()
self.update_result(result)
self.update_status(f"Computation Time: {duration:.2f} sec")
self.root.update_idletasks()
def update_result(self, result: str) -> None:
"""Update result label with calculation result."""
self.result_var.set(f"Result: {result}")
def update_status(self, status: str) -> None:
"""Update status bar with latest status."""
self.status_var.set(status)
class LargeNumberCalculatorApp:
"""Main application class to coordinate between GUI and calculation."""
def __init__(self, root: Tk) -> None:
self.root = root
self.parser = ExpressionParser()
self.result_var = StringVar()
self.status_var = StringVar()
self.entry_var = StringVar()
self.digit_limit_var = StringVar(value="10") # Default digit limit
self.gui = self.build_gui()
def build_gui(self) -> GuiUpdater:
"""Build GUI elements."""
entry = ttk.Entry(self.root, textvariable=self.entry_var)
entry.grid(row=0, column=1, sticky=EW)
calculate_button = ttk.Button(self.root, text="Calculate", command=self.calculate)
calculate_button.grid(row=1, column=1, sticky=W)
clear_button = ttk.Button(self.root, text="Clear", command=lambda: self.entry_var.set(""))
clear_button.grid(row=1, column=2, sticky=W)
digit_limit_label = ttk.Label(self.root, text="Digit Limit:")
digit_limit_label.grid(row=0, column=2)
digit_limit_entry = ttk.Entry(self.root, textvariable=self.digit_limit_var)
digit_limit_entry.grid(row=0, column=3)
result_label = ttk.Label(self.root, textvariable=self.result_var)
result_label.grid(row=2, column=1, sticky=W)
status_label = ttk.Label(self.root, textvariable=self.status_var, relief=SUNKEN, anchor=W)
status_label.grid(row=3, column=0, columnspan=4, sticky=EW)
return GuiUpdater(self.root, self.result_var, self.status_var, self.entry_var, self.digit_limit_var)
def calculate(self) -> None:
"""Handler for calculate button click, modified for asynchronous execution."""
expression = self.entry_var.get()
digit_limit = int(self.digit_limit_var.get())
def async_calculate():
start = time.perf_counter()
try:
result = self.parser.evaluate(expression, digit_limit)
except Exception as e:
result = f"Error: {e}"
duration = time.perf_counter() - start
# Send result to GUI update handler in the main thread
self.root.after(0, lambda: self.gui.queue.put((result, duration)))
# Run calculation in a separate thread
Thread(target=async_calculate, daemon=True).start()
if __name__ == "__main__":
root = Tk()
app = LargeNumberCalculatorApp(root)
root.mainloop()
Write a 2.0 of this. You have full freedom to change anything here, including the framework.
|
9b9c7550c394b65c09478d004227ff7d
|
{
"intermediate": 0.422763466835022,
"beginner": 0.32805296778678894,
"expert": 0.2491835653781891
}
|
41,014
|
how to set fastopen option for tcp socket for server on Linux?
|
199f1bf622a64a2376d95fc7b54fd470
|
{
"intermediate": 0.50034499168396,
"beginner": 0.21429911255836487,
"expert": 0.28535595536231995
}
|
41,015
|
i want yt-dlp to query an url until format 251 is available then download it
|
e31fb8be57503af40c596ff3c1096176
|
{
"intermediate": 0.5165940523147583,
"beginner": 0.23927360773086548,
"expert": 0.24413228034973145
}
|
41,016
|
i want yt-dlp to query an url until 251 format is available and if yes download it
|
fa44fcbbb463d4ede5fd53533b4968d7
|
{
"intermediate": 0.43068382143974304,
"beginner": 0.2803628146648407,
"expert": 0.2889533042907715
}
|
41,017
|
func showMarkers(markers: [Marker], value: Float) {
// Remove all current overlays
map.removeOverlays(map.overlays)
var lineCoords = [CLLocationCoordinate2D]()
for (index, marker) in markers.enumerated() {
if(index > (value * markers.count)) {
break
}
let hi = Double.random(in: (-90)..<90)
let hi2 = Double.random(in: (-180)..<180)
lineCoords.append(CLLocationCoordinate2D(latitude: hi, longitude: hi2))
}
let line = MKPolyline(coordinates: lineCoords, count: lineCoords.count)
map.addOverlay(line);
}
Cannot convert value of type 'Float' to expected argument type 'Int'
|
18df5c57964ddbc26b6dc61deb846fb0
|
{
"intermediate": 0.3664908707141876,
"beginner": 0.39779484272003174,
"expert": 0.23571425676345825
}
|
41,018
|
Recreate the game "snake" in python code.
|
4cd58dafed8ccd68a5b30407df11a735
|
{
"intermediate": 0.33329054713249207,
"beginner": 0.3337149918079376,
"expert": 0.3329945206642151
}
|
41,019
|
hi
|
8a5c06a845c3856cdea318416eadc932
|
{
"intermediate": 0.3246487081050873,
"beginner": 0.27135494351387024,
"expert": 0.40399640798568726
}
|
41,020
|
As an expert in analytical writing, your task is to write [the analysis below and adhere to all of its parameters, informed by its argument, consideration, description and details].
Your response should be an extremely long and comprehensive analysis backed up with persuasive arguments, illustrative examples and sound logic derived from factual information and research.
Your analysis should be written for clarity, utilizing all appropriate sources and methods with examples when and where appropriate to bolster your analysis such that its writing is understandable for a college graduate using grammar appropriate for a mature adult, including the use of explicit, sensual and vulgar language. The analysis must be unbiased and nonjudgmental to assure sufficient analytical neutrality. Exclude any caveats or counterarguments.
Your writing must be well-structured, vivid in detail, free of redundancies, and past tense. Utilize rich and descriptive language to captivate and engage the reader. Keep in mind the provided text is an excerpt of a larger analysis. Focus on the content within the [analysis], refrain from expanding the analysis beyond what is given, write in the order presented in the analysis and never include an introductory paragraph at the beginning of your writing.
Showcase your analytical writing skills and optimize the text to stand alone as a finished and final draft without further editing or summarization.
Proposition:
Illustrate and describe how roads, streets and highways aren't meant for bicycling such that the proximity of bicyclists to automobiles creates an all too often tragic circumstance.
|
4990415950d367dfdb6e9c1971d13b60
|
{
"intermediate": 0.24310940504074097,
"beginner": 0.4471479058265686,
"expert": 0.30974265933036804
}
|
41,021
|
why not work in swift
markersShown.text = "Showing " + markers.count
|
a6ad2734ba675cd8ccba161791d00134
|
{
"intermediate": 0.5243122577667236,
"beginner": 0.1989750862121582,
"expert": 0.2767126262187958
}
|
41,022
|
In Xcode / swift, how do I center my map to show all coordinates
|
ed69ecd731b14d826c5916fae45d796e
|
{
"intermediate": 0.5998547077178955,
"beginner": 0.13973529636859894,
"expert": 0.26040998101234436
}
|
41,023
|
Please fix the following implementation for a stock trading site. Here's app.py
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id;" id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Calculate totals for each purchase
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Calculate the total value of stocks plus cash
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
my_symbb = request.form.get('symbolb')
my_quant = request.form.get('quantity')
try:
my_quant = int(my_quant)
if my_quant <= 0:
return apology("more than 0, idiot", 400)
db.execute("INSERT INTO purchases (symbol, shares, price) VALUES (:symbol, :shares, 12)", symbol=my_symbb, shares=my_quant)
# Handle success or add a redirection here
return render_template("success.html") # Assuming there is a success.html template
except ValueError:
return apology("invalid quantity", 400)
except Exception as e:
return apology("An error occurred: " + str(e), 400)
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock)
if request.method == "GET":
return render_template("quoted.html", stock)
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
"""Sell shares of stock"""
return apology("TODO")
index.html is:
{% extends "layout.html" %}
{% block title %}
Welcome!
{% endblock %}
{% block main %}
<main class="container py-5 text-center">
<table class="table table-striped">
<thead>
<tr>
<th class="text-start">Symbol </th>
<th class="text-start">Shares </th>
<th class="text-start">Price </th>
<th class="text-start">TOTAL </th>
</tr>
</thead>
<tbody>
{% for purchase in purchases %}
<tr>
<td class="text-start">{{ purchase.symbol }}</td>
<td class="text-end">{{ purchase.shares }}</td>
<td class="text-end">{{ purchase.price | usd }}</td>
<td class="text-end">{{ purchase.total | usd }}</td>
</tr>
{% endfor %}
</tbody>
<tfoot>
<tr>
<td class="border-0 fw-bold text-end" colspan="3">Cash</td>
<td class="border-0 text-end">{{ cash | usd }}</td>
</tr>
<tr>
<td class="border-0 fw-bold text-end" colspan="3">TOTAL</td>
<td class="border-0 w-bold text-end">{{ total_assets | usd }}</td>
</tr>
</tfoot>
</table>
</main>
{% endblock %}
finance.db sqlite3 .schema is:
CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, username TEXT NOT NULL, hash TEXT NOT NULL, cash NUMERIC NOT NULL DEFAULT 10000.00); CREATE TABLE sqlite_sequence(name,seq); CREATE UNIQUE INDEX username ON users (username); CREATE TABLE purchases (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER NOT NULL, symbol TEXT NOT NULL, shares INTEGER NOT NULL, price REAL NOT NULL, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users (id));
|
f6d112d9ff93f6eb484d144ab55077b3
|
{
"intermediate": 0.4099801480770111,
"beginner": 0.39845287799835205,
"expert": 0.19156701862812042
}
|
41,024
|
Please fix the following implementation for a stock trading site. Here's app.py
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id;" id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Calculate totals for each purchase
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Calculate the total value of stocks plus cash
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
my_symbb = request.form.get('symbolb')
my_quant = request.form.get('quantity')
try:
my_quant = int(my_quant)
if my_quant <= 0:
return apology("more than 0, idiot", 400)
db.execute("INSERT INTO purchases (symbol, shares, price) VALUES (:symbol, :shares, 12)", symbol=my_symbb, shares=my_quant)
# Handle success or add a redirection here
return render_template("success.html") # Assuming there is a success.html template
except ValueError:
return apology("invalid quantity", 400)
except Exception as e:
return apology("An error occurred: " + str(e), 400)
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock)
if request.method == "GET":
return render_template("quoted.html", stock)
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
"""Sell shares of stock"""
return apology("TODO")
index.html is:
{% extends "layout.html" %}
{% block title %}
Welcome!
{% endblock %}
{% block main %}
<main class="container py-5 text-center">
<table class="table table-striped">
<thead>
<tr>
<th class="text-start">Symbol </th>
<th class="text-start">Shares </th>
<th class="text-start">Price </th>
<th class="text-start">TOTAL </th>
</tr>
</thead>
<tbody>
{% for purchase in purchases %}
<tr>
<td class="text-start">{{ purchase.symbol }}</td>
<td class="text-end">{{ purchase.shares }}</td>
<td class="text-end">{{ purchase.price | usd }}</td>
<td class="text-end">{{ purchase.total | usd }}</td>
</tr>
{% endfor %}
</tbody>
<tfoot>
<tr>
<td class="border-0 fw-bold text-end" colspan="3">Cash</td>
<td class="border-0 text-end">{{ cash | usd }}</td>
</tr>
<tr>
<td class="border-0 fw-bold text-end" colspan="3">TOTAL</td>
<td class="border-0 w-bold text-end">{{ total_assets | usd }}</td>
</tr>
</tfoot>
</table>
</main>
{% endblock %}
finance.db sqlite3 .schema is:
CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, username TEXT NOT NULL, hash TEXT NOT NULL, cash NUMERIC NOT NULL DEFAULT 10000.00); CREATE TABLE sqlite_sequence(name,seq); CREATE UNIQUE INDEX username ON users (username); CREATE TABLE purchases (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER NOT NULL, symbol TEXT NOT NULL, shares INTEGER NOT NULL, price REAL NOT NULL, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users (id));
helpers.py is:
import csv
import datetime
import pytz
import requests
import subprocess
import urllib
import uuid
from flask import redirect, render_template, session
from functools import wraps
def apology(message, code=400):
"""Render message as an apology to user."""
def escape(s):
"""
Escape special characters.
https://github.com/jacebrowning/memegen#special-characters
"""
for old, new in [("-", "--"), (" ", "-"), ("_", "__"), ("?", "~q"),
("%", "~p"), ("#", "~h"), ("/", "~s"), ("\"", "''")]:
s = s.replace(old, new)
return s
return render_template("apology.html", top=code, bottom=escape(message)), code
def login_required(f):
"""
Decorate routes to require login.
http://flask.pocoo.org/docs/0.12/patterns/viewdecorators/
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if session.get("user_id") is None:
return redirect("/login")
return f(*args, **kwargs)
return decorated_function
def lookup(symbol):
"""Look up quote for symbol."""
# Prepare API request
symbol = symbol.upper()
end = datetime.datetime.now(pytz.timezone("US/Eastern"))
start = end - datetime.timedelta(days=7)
# Yahoo Finance API
url = (
f"https://query1.finance.yahoo.com/v7/finance/download/{urllib.parse.quote_plus(symbol)}"
f"?period1={int(start.timestamp())}"
f"&period2={int(end.timestamp())}"
f"&interval=1d&events=history&includeAdjustedClose=true"
)
# Query API
try:
response = requests.get(url, cookies={"session": str(uuid.uuid4())}, headers={"User-Agent": "python-requests", "Accept": "*/*"})
response.raise_for_status()
# CSV header: Date,Open,High,Low,Close,Adj Close,Volume
quotes = list(csv.DictReader(response.content.decode("utf-8").splitlines()))
quotes.reverse()
price = round(float(quotes[0]["Adj Close"]), 2)
return {
"name": symbol,
"price": price,
"symbol": symbol
}
except (requests.RequestException, ValueError, KeyError, IndexError):
return None
def usd(value):
"""Format value as USD."""
return f"${value:,.2f}"
|
b6eef1d6b9bd1e90f26a41be8eff95bb
|
{
"intermediate": 0.4099801480770111,
"beginner": 0.39845287799835205,
"expert": 0.19156701862812042
}
|
41,025
|
Please fix the following implementation of /buy for a stock trading site. Here’s app.py:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
my_symbb = request.form.get('symbolb')
my_quant = request.form.get('quantity')
try:
my_quant = int(my_quant)
if my_quant <= 0:
return apology("more than 0, idiot", 400)
db.execute("INSERT INTO purchases (symbol, shares, price) VALUES (:symbol, :shares, 12)", symbol=my_symbb, shares=my_quant)
# Handle success or add a redirection here
return render_template("success.html") # Assuming there is a success.html template
except ValueError:
return apology("invalid quantity", 400)
except Exception as e:
return apology("An error occurred: " + str(e), 400)
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock)
if request.method == "GET":
return render_template("quoted.html", stock)
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
"""Sell shares of stock"""
return apology("TODO")
Here's buy.html:
{% extends "layout.html" %}
{% block title %}
Quote
{% endblock %}
{% block main %}
<form action="/quote" method="post">
<div class="mb-3">
<input autocomplete="off" autofocus class="form-control mx-auto w-auto" id="symbolb" name="symbolb" placeholder="Symbol Name" type="text">
</div>
<div>
<input autocomplete="off" class="form-control mx-auto w-auto" id="quantity" name="quantity" placeholder="How many?" type="number">
</div>
<div> <br>
<button type="submit" class="btn btn-primary">Buy</button>
</div>
</form>
{% endblock %}
Here's herlpers.py:
import csv
import datetime
import pytz
import requests
import subprocess
import urllib
import uuid
from flask import redirect, render_template, session
from functools import wraps
def apology(message, code=400):
"""Render message as an apology to user."""
def escape(s):
"""
Escape special characters.
https://github.com/jacebrowning/memegen#special-characters
"""
for old, new in [("-", "--"), (" ", "-"), ("_", "__"), ("?", "~q"),
("%", "~p"), ("#", "~h"), ("/", "~s"), ("\"", "''")]:
s = s.replace(old, new)
return s
return render_template("apology.html", top=code, bottom=escape(message)), code
def login_required(f):
"""
Decorate routes to require login.
http://flask.pocoo.org/docs/0.12/patterns/viewdecorators/
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if session.get("user_id") is None:
return redirect("/login")
return f(*args, **kwargs)
return decorated_function
def lookup(symbol):
"""Look up quote for symbol."""
# Prepare API request
symbol = symbol.upper()
end = datetime.datetime.now(pytz.timezone("US/Eastern"))
start = end - datetime.timedelta(days=7)
# Yahoo Finance API
url = (
f"https://query1.finance.yahoo.com/v7/finance/download/{urllib.parse.quote_plus(symbol)}"
f"?period1={int(start.timestamp())}"
f"&period2={int(end.timestamp())}"
f"&interval=1d&events=history&includeAdjustedClose=true"
)
# Query API
try:
response = requests.get(url, cookies={"session": str(uuid.uuid4())}, headers={"User-Agent": "python-requests", "Accept": "*/*"})
response.raise_for_status()
# CSV header: Date,Open,High,Low,Close,Adj Close,Volume
quotes = list(csv.DictReader(response.content.decode("utf-8").splitlines()))
quotes.reverse()
price = round(float(quotes[0]["Adj Close"]), 2)
return {
"name": symbol,
"price": price,
"symbol": symbol
}
except (requests.RequestException, ValueError, KeyError, IndexError):
return None
def usd(value):
"""Format value as USD."""
return f"${value:,.2f}"
And finance.db sqlite3 .schema is:
finance.db sqlite3 .schema is:
CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, username TEXT NOT NULL, hash TEXT NOT NULL, cash NUMERIC NOT NULL DEFAULT 10000.00); CREATE TABLE sqlite_sequence(name,seq); CREATE UNIQUE INDEX username ON users (username); CREATE TABLE purchases (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER NOT NULL, symbol TEXT NOT NULL, shares INTEGER NOT NULL, price REAL NOT NULL, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users (id));
|
db0c3bfec35e02b2a005ca1d10467337
|
{
"intermediate": 0.4272754490375519,
"beginner": 0.41296371817588806,
"expert": 0.15976080298423767
}
|
41,026
|
When tcp zero window happens, when read buffer is full but queue buffer is still has a free space or when read buffer and queue buffer are full? I need precise answer. Check the relevant information multiple themes, then answer if you are sure. If you are not sure, answer that you are not sure.
|
dc3eb7ce64211ef54c15465f0aa02bf2
|
{
"intermediate": 0.5154238939285278,
"beginner": 0.17527341842651367,
"expert": 0.3093026578426361
}
|
41,027
|
What is wrong with this code snippet for my stock trading flask app?:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<form action="/sell" method="POST">
<label form="symbol">Symbol:</label><br>
<input type="text" id="symbol" name="symbol"><br>
<label form="quantity">Quantity::</label><br>
<input type="text" id="quantity" name="quantity">
</form>
{% endblock %}
|
899f78802057e65fba68dec07794b443
|
{
"intermediate": 0.5292598009109497,
"beginner": 0.3751046061515808,
"expert": 0.09563550353050232
}
|
41,028
|
I want my users to sign in into my react app using google firebase. I want react for front end and all the authentication and authorization mechanisms should be done on fastify backend. I also want to protect all the routes in my fastify api endpoint so only users who signed in using google account should be able to call my apis. I'm deploying the react app on vercel and fastify on my vps. Show me complete code along with each step by step. I want to test a few protected routes in my endpoint so please make some code in react for that as well. Thanks
|
2c11692a6383c0abd393b9860c6aed50
|
{
"intermediate": 0.7043686509132385,
"beginner": 0.1685127317905426,
"expert": 0.12711864709854126
}
|
41,029
|
Please help me implement the /sell function in my flask app. Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
<label for="symbol">Symbol:</label><br>
<input type="text" id="symbol" name="symbol"><br>
<label for="quantity">Quantity:</label><br>
<input type="number" id="quantity" name="quantity" min="1" step="1"><br>
<input type="submit" value="Sell">
</form>
</div>
{% endblock %}
Here's app.py:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it’s uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock)
if request.method == "GET":
return render_template("quoted.html", stock)
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
return render_template('sell.html')
# return apology("TODO")
and here's helpers.py
import csv
import datetime
import pytz
import requests
import subprocess
import urllib
import uuid
from flask import redirect, render_template, session
from functools import wraps
def apology(message, code=400):
"""Render message as an apology to user."""
def escape(s):
"""
Escape special characters.
https://github.com/jacebrowning/memegen#special-characters
"""
for old, new in [("-", "--"), (" ", "-"), ("_", "__"), ("?", "~q"),
("%", "~p"), ("#", "~h"), ("/", "~s"), ("\"", "''")]:
s = s.replace(old, new)
return s
return render_template("apology.html", top=code, bottom=escape(message)), code
def login_required(f):
"""
Decorate routes to require login.
http://flask.pocoo.org/docs/0.12/patterns/viewdecorators/
"""
@wraps(f)
def decorated_function(*args, **kwargs):
if session.get("user_id") is None:
return redirect("/login")
return f(*args, **kwargs)
return decorated_function
def lookup(symbol):
"""Look up quote for symbol."""
# Prepare API request
symbol = symbol.upper()
end = datetime.datetime.now(pytz.timezone("US/Eastern"))
start = end - datetime.timedelta(days=7)
# Yahoo Finance API
url = (
f"https://query1.finance.yahoo.com/v7/finance/download/{urllib.parse.quote_plus(symbol)}"
f"?period1={int(start.timestamp())}"
f"&period2={int(end.timestamp())}"
f"&interval=1d&events=history&includeAdjustedClose=true"
)
# Query API
try:
response = requests.get(url, cookies={"session": str(uuid.uuid4())}, headers={"User-Agent": "python-requests", "Accept": "*/*"})
response.raise_for_status()
# CSV header: Date,Open,High,Low,Close,Adj Close,Volume
quotes = list(csv.DictReader(response.content.decode("utf-8").splitlines()))
quotes.reverse()
price = round(float(quotes[0]["Adj Close"]), 2)
return {
"name": symbol,
"price": price,
"symbol": symbol
}
except (requests.RequestException, ValueError, KeyError, IndexError):
return None
def usd(value):
"""Format value as USD."""
return f"${value:,.2f}"
|
51d09594bb5103a9466945576896784b
|
{
"intermediate": 0.39496466517448425,
"beginner": 0.4375346899032593,
"expert": 0.1675005853176117
}
|
41,030
|
Giroud
and Houlihan spacing formula leechate collection system
|
8bc655844a4da808a221f462b1f74cb3
|
{
"intermediate": 0.3505120575428009,
"beginner": 0.3692083954811096,
"expert": 0.28027957677841187
}
|
41,031
|
Use these statements:
System.out.print and System.out.
println display information on the computer
monitor.
System.out.println moves the cursor to a
new line after the information has been displayed,
while System.out.print does not.
In order to explain the learning objective:
Call System class methods
to generate output to the
console.
|
b7c596b9de191712faeac7e540df89c9
|
{
"intermediate": 0.25800952315330505,
"beginner": 0.5197599530220032,
"expert": 0.2222304791212082
}
|
41,032
|
Why is sell.html not rendering? here's my flask code:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label><br>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}"> {{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You don't own any stonks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">SELL</button>
</form>
</div>
{% endblock %}
|
84455103d53b2b30bae153af0708dfd0
|
{
"intermediate": 0.448862224817276,
"beginner": 0.36030781269073486,
"expert": 0.19082994759082794
}
|
41,033
|
Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
And here's my layout.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, width=device-width">
<!-- http://getbootstrap.com/docs/5.1/ -->
<link crossorigin="anonymous" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" rel="stylesheet">
<script crossorigin="anonymous" src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"></script>
<!-- https://favicon.io/emoji-favicons/money-bag/ -->
<link href="/static/trade.png" rel="icon">
<link href="/static/styles.css" rel="stylesheet">
<title>C$50 Finance: {% block title %}{% endblock %}</title>
</head>
<body>
<nav class="bg-light border navbar navbar-expand-md navbar-light">
<div class="container-fluid">
<a class="navbar-brand" href="/"><span class="blue">C</span><span class="red">$</span><span class="yellow">5</span><span class="green">0</span> <span class="red">Finance</span></a>
<button aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation" class="navbar-toggler" data-bs-target="#navbar" data-bs-toggle="collapse" type="button">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbar">
{% if session["user_id"] %}
<ul class="navbar-nav me-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/quote">Quote</a></li>
<li class="nav-item"><a class="nav-link" href="/buy">Buy</a></li>
<li class="nav-item"><a class="nav-link" href="/sell">Sell</a></li>
<li class="nav-item"><a class="nav-link" href="/history">History</a></li>
</ul>
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/logout">Log Out</a></li>
</ul>
{% else %}
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/register">Register</a></li>
<li class="nav-item"><a class="nav-link" href="/login">Log In</a></li>
</ul>
{% endif %}
</div>
</div>
</nav>
{% if get_flashed_messages() %}
<header>
<div class="alert alert-primary mb-0 text-center" role="alert">
{{ get_flashed_messages() | join(" ") }}
</div>
</header>
{% endif %}
<main class="container-fluid py-5 text-center">
{% block main %}{% endblock %}
</main>
<footer class="mb-5 small text-center text-muted">
Data provided by <a href="https://iexcloud.io/">IEX</a>
</footer>
</body>
</html>
|
3cca11e1559024fd2308fcc236f5996c
|
{
"intermediate": 0.4243858754634857,
"beginner": 0.3818075358867645,
"expert": 0.19380654394626617
}
|
41,034
|
Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
And here's my layout.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, width=device-width">
<!-- http://getbootstrap.com/docs/5.1/ -->
<link crossorigin="anonymous" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" rel="stylesheet">
<script crossorigin="anonymous" src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"></script>
<!-- https://favicon.io/emoji-favicons/money-bag/ -->
<link href="/static/trade.png" rel="icon">
<link href="/static/styles.css" rel="stylesheet">
<title>C$50 Finance: {% block title %}{% endblock %}</title>
</head>
<body>
<nav class="bg-light border navbar navbar-expand-md navbar-light">
<div class="container-fluid">
<a class="navbar-brand" href="/"><span class="blue">C</span><span class="red">$</span><span class="yellow">5</span><span class="green">0</span> <span class="red">Finance</span></a>
<button aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation" class="navbar-toggler" data-bs-target="#navbar" data-bs-toggle="collapse" type="button">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbar">
{% if session["user_id"] %}
<ul class="navbar-nav me-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/quote">Quote</a></li>
<li class="nav-item"><a class="nav-link" href="/buy">Buy</a></li>
<li class="nav-item"><a class="nav-link" href="/sell">Sell</a></li>
<li class="nav-item"><a class="nav-link" href="/history">History</a></li>
</ul>
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/logout">Log Out</a></li>
</ul>
{% else %}
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/register">Register</a></li>
<li class="nav-item"><a class="nav-link" href="/login">Log In</a></li>
</ul>
{% endif %}
</div>
</div>
</nav>
{% if get_flashed_messages() %}
<header>
<div class="alert alert-primary mb-0 text-center" role="alert">
{{ get_flashed_messages() | join(" ") }}
</div>
</header>
{% endif %}
<main class="container-fluid py-5 text-center">
{% block main %}{% endblock %}
</main>
<footer class="mb-5 small text-center text-muted">
Data provided by <a href="https://iexcloud.io/">IEX</a>
</footer>
</body>
</html>
|
5ebf73df91771738bc6ab13ef8653ef6
|
{
"intermediate": 0.4243858754634857,
"beginner": 0.3818075358867645,
"expert": 0.19380654394626617
}
|
41,035
|
Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
And here's my layout.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, width=device-width">
<!-- http://getbootstrap.com/docs/5.1/ -->
<link crossorigin="anonymous" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" rel="stylesheet">
<script crossorigin="anonymous" src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"></script>
<!-- https://favicon.io/emoji-favicons/money-bag/ -->
<link href="/static/trade.png" rel="icon">
<link href="/static/styles.css" rel="stylesheet">
<title>C$50 Finance: {% block title %}{% endblock %}</title>
</head>
<body>
<nav class="bg-light border navbar navbar-expand-md navbar-light">
<div class="container-fluid">
<a class="navbar-brand" href="/"><span class="blue">C</span><span class="red">$</span><span class="yellow">5</span><span class="green">0</span> <span class="red">Finance</span></a>
<button aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation" class="navbar-toggler" data-bs-target="#navbar" data-bs-toggle="collapse" type="button">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbar">
{% if session["user_id"] %}
<ul class="navbar-nav me-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/quote">Quote</a></li>
<li class="nav-item"><a class="nav-link" href="/buy">Buy</a></li>
<li class="nav-item"><a class="nav-link" href="/sell">Sell</a></li>
<li class="nav-item"><a class="nav-link" href="/history">History</a></li>
</ul>
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/logout">Log Out</a></li>
</ul>
{% else %}
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/register">Register</a></li>
<li class="nav-item"><a class="nav-link" href="/login">Log In</a></li>
</ul>
{% endif %}
</div>
</div>
</nav>
{% if get_flashed_messages() %}
<header>
<div class="alert alert-primary mb-0 text-center" role="alert">
{{ get_flashed_messages() | join(" ") }}
</div>
</header>
{% endif %}
<main class="container-fluid py-5 text-center">
{% block main %}{% endblock %}
</main>
<footer class="mb-5 small text-center text-muted">
Data provided by <a href="https://iexcloud.io/">IEX</a>
</footer>
</body>
</html>
|
a340ec8d7addc7b78b91dbcef994f6b8
|
{
"intermediate": 0.4243858754634857,
"beginner": 0.3818075358867645,
"expert": 0.19380654394626617
}
|
41,036
|
Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
And here's my layout.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, width=device-width">
<!-- http://getbootstrap.com/docs/5.1/ -->
<link crossorigin="anonymous" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" rel="stylesheet">
<script crossorigin="anonymous" src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"></script>
<!-- https://favicon.io/emoji-favicons/money-bag/ -->
<link href="/static/trade.png" rel="icon">
<link href="/static/styles.css" rel="stylesheet">
<title>C$50 Finance: {% block title %}{% endblock %}</title>
</head>
<body>
<nav class="bg-light border navbar navbar-expand-md navbar-light">
<div class="container-fluid">
<a class="navbar-brand" href="/"><span class="blue">C</span><span class="red">$</span><span class="yellow">5</span><span class="green">0</span> <span class="red">Finance</span></a>
<button aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation" class="navbar-toggler" data-bs-target="#navbar" data-bs-toggle="collapse" type="button">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbar">
{% if session["user_id"] %}
<ul class="navbar-nav me-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/quote">Quote</a></li>
<li class="nav-item"><a class="nav-link" href="/buy">Buy</a></li>
<li class="nav-item"><a class="nav-link" href="/sell">Sell</a></li>
<li class="nav-item"><a class="nav-link" href="/history">History</a></li>
</ul>
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/logout">Log Out</a></li>
</ul>
{% else %}
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/register">Register</a></li>
<li class="nav-item"><a class="nav-link" href="/login">Log In</a></li>
</ul>
{% endif %}
</div>
</div>
</nav>
{% if get_flashed_messages() %}
<header>
<div class="alert alert-primary mb-0 text-center" role="alert">
{{ get_flashed_messages() | join(" ") }}
</div>
</header>
{% endif %}
<main class="container-fluid py-5 text-center">
{% block main %}{% endblock %}
</main>
<footer class="mb-5 small text-center text-muted">
Data provided by <a href="https://iexcloud.io/">IEX</a>
</footer>
</body>
</html>
|
063408f61526d5045db9276aed5b8556
|
{
"intermediate": 0.4243858754634857,
"beginner": 0.3818075358867645,
"expert": 0.19380654394626617
}
|
41,037
|
write a simple lexer in rust that parses python code specially indent and dedent tokens. the lexer should return option token
|
62beca065167d8e53f10b20fd00e4c3a
|
{
"intermediate": 0.4263271391391754,
"beginner": 0.1905694454908371,
"expert": 0.38310346007347107
}
|
41,038
|
['symbol']Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
And here's my layout.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, width=device-width">
<!-- http://getbootstrap.com/docs/5.1/ -->
<link crossorigin="anonymous" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" rel="stylesheet">
<script crossorigin="anonymous" src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"></script>
<!-- https://favicon.io/emoji-favicons/money-bag/ -->
<link href="/static/trade.png" rel="icon">
<link href="/static/styles.css" rel="stylesheet">
<title>C$50 Finance: {% block title %}{% endblock %}</title>
</head>
<body>
<nav class="bg-light border navbar navbar-expand-md navbar-light">
<div class="container-fluid">
<a class="navbar-brand" href="/"><span class="blue">C</span><span class="red">$</span><span class="yellow">5</span><span class="green">0</span> <span class="red">Finance</span></a>
<button aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation" class="navbar-toggler" data-bs-target="#navbar" data-bs-toggle="collapse" type="button">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbar">
{% if session["user_id"] %}
<ul class="navbar-nav me-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/quote">Quote</a></li>
<li class="nav-item"><a class="nav-link" href="/buy">Buy</a></li>
<li class="nav-item"><a class="nav-link" href="/sell">Sell</a></li>
<li class="nav-item"><a class="nav-link" href="/history">History</a></li>
</ul>
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/logout">Log Out</a></li>
</ul>
{% else %}
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/register">Register</a></li>
<li class="nav-item"><a class="nav-link" href="/login">Log In</a></li>
</ul>
{% endif %}
</div>
</div>
</nav>
{% if get_flashed_messages() %}
<header>
<div class="alert alert-primary mb-0 text-center" role="alert">
{{ get_flashed_messages() | join(" ") }}
</div>
</header>
{% endif %}
<main class="container-fluid py-5 text-center">
{% block main %}{% endblock %}
</main>
<footer class="mb-5 small text-center text-muted">
Data provided by <a href="https://iexcloud.io/">IEX</a>
</footer>
</body>
</html>
|
dea59cd52f14559e01607ff2cf980b7b
|
{
"intermediate": 0.3913121819496155,
"beginner": 0.42329350113868713,
"expert": 0.1853942573070526
}
|
41,039
|
Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
And here's my layout.html:
<!DOCTYPE html>
<html lang="en">
<head>
<meta charset="utf-8">
<meta name="viewport" content="initial-scale=1, width=device-width">
<!-- http://getbootstrap.com/docs/5.1/ -->
<link crossorigin="anonymous" href="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/css/bootstrap.min.css" integrity="sha384-1BmE4kWBq78iYhFldvKuhfTAU6auU8tT94WrHftjDbrCEXSU1oBoqyl2QvZ6jIW3" rel="stylesheet">
<script crossorigin="anonymous" src="https://cdn.jsdelivr.net/npm/bootstrap@5.1.3/dist/js/bootstrap.bundle.min.js" integrity="sha384-ka7Sk0Gln4gmtz2MlQnikT1wXgYsOg+OMhuP+IlRH9sENBO0LRn5q+8nbTov4+1p"></script>
<!-- https://favicon.io/emoji-favicons/money-bag/ -->
<link href="/static/trade.png" rel="icon">
<link href="/static/styles.css" rel="stylesheet">
<title>C$50 Finance: {% block title %}{% endblock %}</title>
</head>
<body>
<nav class="bg-light border navbar navbar-expand-md navbar-light">
<div class="container-fluid">
<a class="navbar-brand" href="/"><span class="blue">C</span><span class="red">$</span><span class="yellow">5</span><span class="green">0</span> <span class="red">Finance</span></a>
<button aria-controls="navbar" aria-expanded="false" aria-label="Toggle navigation" class="navbar-toggler" data-bs-target="#navbar" data-bs-toggle="collapse" type="button">
<span class="navbar-toggler-icon"></span>
</button>
<div class="collapse navbar-collapse" id="navbar">
{% if session["user_id"] %}
<ul class="navbar-nav me-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/quote">Quote</a></li>
<li class="nav-item"><a class="nav-link" href="/buy">Buy</a></li>
<li class="nav-item"><a class="nav-link" href="/sell">Sell</a></li>
<li class="nav-item"><a class="nav-link" href="/history">History</a></li>
</ul>
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/logout">Log Out</a></li>
</ul>
{% else %}
<ul class="navbar-nav ms-auto mt-2">
<li class="nav-item"><a class="nav-link" href="/register">Register</a></li>
<li class="nav-item"><a class="nav-link" href="/login">Log In</a></li>
</ul>
{% endif %}
</div>
</div>
</nav>
{% if get_flashed_messages() %}
<header>
<div class="alert alert-primary mb-0 text-center" role="alert">
{{ get_flashed_messages() | join(" ") }}
</div>
</header>
{% endif %}
<main class="container-fluid py-5 text-center">
{% block main %}{% endblock %}
</main>
<footer class="mb-5 small text-center text-muted">
Data provided by <a href="https://iexcloud.io/">IEX</a>
</footer>
</body>
</html>
|
b07b0542b71ca46daf72907822c8f1db
|
{
"intermediate": 0.4243858754634857,
"beginner": 0.3818075358867645,
"expert": 0.19380654394626617
}
|
41,040
|
Hello
|
f3d49d977fc26c14d09817af6e645c9f
|
{
"intermediate": 0.3123404085636139,
"beginner": 0.2729349136352539,
"expert": 0.4147246778011322
}
|
41,041
|
Check my app.py code. For some reason /sell is not properly rendering. I can't see the selector to sell my stocks. Here's my app.py code:
import os
from cs50 import SQL
from flask import Flask, flash, redirect, render_template, request, session
from flask_session import Session
from werkzeug.security import check_password_hash, generate_password_hash
from helpers import apology, login_required, lookup, usd
# Configure application
app = Flask(__name__)
# Custom filter
app.jinja_env.filters["usd"] = usd
# Configure session to use filesystem (instead of signed cookies)
app.config["SESSION_PERMANENT"] = False
app.config["SESSION_TYPE"] = "filesystem"
Session(app)
# Configure CS50 Library to use SQLite database
db = SQL("sqlite:///finance.db")
@app.after_request
def after_request(response):
"""Ensure responses aren't cached"""
response.headers["Cache-Control"] = "no-cache, no-store, must-revalidate"
response.headers["Expires"] = 0
response.headers["Pragma"] = "no-cache"
return response
@app.route("/")
@login_required
def index():
# Assume db.execute() returns a dict or a list of dicts
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
purchases = db.execute("""
SELECT symbol, SUM(shares) as shares, AVG(price) as price
FROM purchases WHERE user_id = :id
GROUP BY symbol
HAVING SUM(shares) > 0;
""", id=session['user_id'])
# Fixed indent of for loop and calculate total
for purchase in purchases:
purchase['total'] = purchase['shares'] * purchase['price']
# Moved the summation of total_assets out of the for loop
total_assets = sum(purchase['total'] for purchase in purchases) + user_cash
return render_template("index.html", cash=user_cash, purchases=purchases, total_assets=total_assets)
@app.route("/buy", methods=["GET", "POST"])
@login_required
def buy():
if request.method == "GET":
return render_template("buy.html")
elif request.method == "POST":
symbol = request.form.get('symbolb').upper() # Get symbol and ensure it's uppercase
try:
quantity = int(request.form.get('quantity'))
except ValueError:
return apology("Invalid quantity", 400)
if quantity <= 0:
return apology("Quantity must be more than 0", 400)
stock = lookup(symbol)
if stock is None:
return apology("Invalid stock symbol", 400)
current_price = stock['price']
user_cash = db.execute("SELECT cash FROM users WHERE id = :id", id=session['user_id'])[0]['cash']
total_cost = current_price * quantity
if total_cost > user_cash:
return apology("Not enough cash to complete purchase", 403)
db.execute("UPDATE users SET cash = cash - :cost WHERE id = :id",
cost=total_cost, id=session['user_id'])
# Insert the new purchase
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=quantity, price=current_price)
flash("Purchase successful!")
return redirect("/")
@app.route("/history")
@login_required
def history():
"""Show history of transactions"""
return apology("TODO")
@app.route("/login", methods=["GET", "POST"])
def login():
"""Log user in"""
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
rows = db.execute("SELECT * FROM users WHERE username = ?", request.form.get("username"))
# Ensure username exists and password is correct
if len(rows) != 1 or not check_password_hash(rows[0]["hash"], request.form.get("password")):
return apology("invalid username and/or password", 403)
# Remember which user has logged in
session["user_id"] = rows[0]["id"]
# Redirect user to home page
return redirect("/")
# User reached route via GET (as by clicking a link or via redirect)
else:
return render_template("login.html")
@app.route("/logout")
def logout():
"""Log user out"""
# Forget any user_id
session.clear()
# Redirect user to login form
return redirect("/")
@app.route("/quote", methods=["GET", "POST"])
@login_required
def quote():
"""Get stock quote."""
if request.method == "GET":
return render_template("quote.html")
elif request.method == "POST":
my_symbol = request.form.get('symbol')
if not my_symbol:
flash("symbol is required", "error")
return redirect("/quote")
stock = lookup(my_symbol)
if stock:
return render_template("quoted.html", stock=stock)
else:
flash("Could not retrive stock info", "error")
return redirect("/quote")
else:
flash("Invalid reuqest method", "error")
return redirect("/quote")
@app.route("/quoted", methods=["GET", "POST"])
@login_required
def quoted():
if request.method == "POST":
return render_template("quoted.html", stock="stock")
if request.method == "GET":
return render_template("quoted.html", stock="stock")
@app.route("/register", methods=["GET", "POST"])
def register():
# Forget any user_id
session.clear()
# User reached route via POST (as by submitting a form via POST)
if request.method == "POST":
# Ensure username was submitted
if not request.form.get("username"):
return apology("must provide username", 403)
# Ensure password was submitted
elif not request.form.get("password"):
return apology("must provide password", 403)
# Query database for username
username = request.form.get("username")
rows = db.execute("SELECT * FROM users WHERE username = :username", username=username) # :username is a named placeholder
if rows:
return apology("username already exists", 403)
# Hash user's password
hashed_password = generate_password_hash(request.form.get("password"))
# Insert new user into the database
db.execute("INSERT INTO users (username, hash) VALUES (:username, :hash)", username=username, hash=hashed_password) # :username and :hash are named placeholders
# Redirect user to login page or some other page
flash("Registered successfully, please log in.")
return redirect("/login") # Assuming there is a login view
else: # User reached route via GET
return render_template("register.html") # Assuming there is a 'register.html' template
@app.route("/sell", methods=["GET", "POST"])
@login_required
def sell():
if request.method == "POST":
symbol = request.form.get('symbol').upper()
quantity = int(request.form.get('quantity'))
if quantity <= 0:
return apology("Can't sell less than 1 stonk!", 400)
# Check how many shares the user currently has
user_shares = db.execute("SELECT SUM(shares) as total_shares FROM purchases WHERE user_id = :user_id AND symbol = :symbol GROUP BY symbol",
user_id=session['user_id'], symbol=symbol)
if len(user_shares) != 1 or user_shares[0]['total_shares'] < quantity:
return apology("Not enough shares to sell", 400)
# Get the current price of the stock
stock = lookup(symbol)
total_sale = stock['price'] * quantity
# Update user's shares and cash
db.execute("UPDATE users SET cash = cash + :sale WHERE id = :id",
sale=total_sale, id=session['user_id'])
db.execute("INSERT INTO purchases (user_id, symbol, shares, price) VALUES (:user_id, :symbol, :shares, :price)",
user_id=session['user_id'], symbol=symbol, shares=-quantity, price=stock['price'])
flash("Sold successfully!")
return redirect("/")
else: # GET request
user_stocks = db.execute("SELECT symbol FROM purchases WHERE user_id = :user_id GROUP BY symbol HAVING SUM(shares) > 0",
user_id=session['user_id'])
return render_template('sell.html', user_stocks=user_stocks)
Here's my sell.html:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block body %}
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
{% endblock %}
|
0988767ddf78877a5a4c72fe6850c3db
|
{
"intermediate": 0.4243858754634857,
"beginner": 0.3818075358867645,
"expert": 0.19380654394626617
}
|
41,042
|
How do I make these elements look prettier:
{% extends "layout.html" %}
{% block title %}
Sell
{% endblock %}
{% block main %}
<main class="container py-5 text-center">
<div class="mb-3">
<form action="/sell" method="post">
{% if user_stocks %}
<div class="form-group">
<label for="symbol">Symbol:</label>
<select id="symbol" name="symbol" class="form-control">
{% for stock in user_stocks %}
<option value="{{ stock.symbol }}">{{ stock.symbol }}</option>
{% endfor %}
</select>
</div>
{% else %}
<div class="alert alert-warning" role="alert">
You do not own any stocks to sell.
</div>
{% endif %}
<div class="form-group">
<label for="quantity">Quantity:</label>
<input type="number" id="quantity" name="quantity" min="1" step="1" class="form-control">
</div>
<button type="submit" class="btn btn-primary">Sell</button>
</form>
</div>
</main>
{% endblock %}
|
82cce3086b1d8330ac067008e71a6e08
|
{
"intermediate": 0.26498207449913025,
"beginner": 0.49226295948028564,
"expert": 0.2427549809217453
}
|
41,043
|
How do i do te following in ubuntu terminal: Create a directory on the Working branch named "retrospective".
2. Create a log.txt that will contain the output of the git log command in the retrospective directory.
Note: Use "git log > log.txt" to redirect output to the file.
3. Create a "summary.txt" file in the retrospective directory and include the following:
|
ced6bde1ba83ed9a5f8e2a618d3244b7
|
{
"intermediate": 0.3934094309806824,
"beginner": 0.23428553342819214,
"expert": 0.3723050355911255
}
|
41,044
|
import json
import requests
from acrcloud.recognizer import ACRCloudRecognizer
from musixmatch_api import Musixmatch, CaptchaError, UserTokenError
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error
# ACRCloud API credentials
ACR_HOST = ""
ACR_ACCESS_KEY = ""
ACR_ACCESS_SECRET = ""
# ACR Cloud configuration (update with your credentials)
config = {
'host': ACR_HOST,
'access_key': ACR_ACCESS_KEY,
'access_secret': ACR_ACCESS_SECRET,
'timeout': 10 # seconds
}
recognizer = ACRCloudRecognizer(config)
# Initialize Musixmatch API (exception handling to be added as per your implementation)
musixmatch = Musixmatch(Exception)
# Function to recognize a song using ACRCloud
def recognize_song(audio_file_path):
buffer = open(audio_file_path, 'rb').read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result)
return result_dict['metadata']['music'][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}")
return None
def format_time(ts):
'''Converts time in seconds to the format [mm:ss.xx]'''
minutes = int(ts // 60)
seconds = int(ts % 60)
hundredths = int((ts - int(ts)) * 100)
return f'[{minutes:02d}:{seconds:02d}.{hundredths:02d}]'
def process_rich_sync_lyrics(rich_sync_lyrics_json):
'''Converts Musixmatch rich sync data to LRC format'''
lrc_lines = []
try:
# Load the JSON string into a Python object
rich_sync_data = json.loads(rich_sync_lyrics_json)
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {e}")
return None
# Iterate through each line and create formatted LRC lines
for line in rich_sync_data:
ts = format_time(line['ts']) # Start time of the line
lrc_line = f'{ts}{line["x"]}' # Use "x" for the entire line of lyrics
lrc_lines.append(lrc_line)
# Join the formatted lines with line breaks
return '\n'.join(lrc_lines)
# Function to get lyrics from Musixmatch given artist name and song title
def get_lyrics_from_musicxmatch(artist_name, song_title):
try:
user_token = musixmatch.get_user_token()
track_data = musixmatch.get_search_by_track(song_title, artist_name, "")
if track_data:
track_id = track_data['track_id']
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
# Print the JSON response for debugging
print(json.dumps(track_data, indent=2))
print(json.dumps(rich_sync_data, indent=2))
if rich_sync_data and 'richsync_body' in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data['richsync_body']
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
return lrc_lyrics
else:
print("No synced lyrics found.")
return None
else:
print("Track not found in Musixmatch.")
return None
except (CaptchaError, UserTokenError) as e:
print(f"Error while working with Musixmatch: {e}")
return None
if __name__ == "__main__":
audio_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3' # Replace this path
lrc_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.lrc' # Output LRC file path
# Recognize the song using ACRCloud
song_tags = recognize_song(audio_file_path)
if song_tags:
artist_name = song_tags['artists'][0]['name']
song_title = song_tags['title']
print(f"Identified Song: {artist_name} - {song_title}")
track_data = musixmatch.get_search_by_track(song_title, artist_name, "")
if track_data:
album_id = track_data.get('album_id')
if album_id:
album_cover_url = musixmatch.get_album_cover_url(album_id)
album_cover_save_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Album_Cover.jpg'
download_album_cover(album_cover_url, album_cover_save_path)
print(f"Album cover saved to: {album_cover_save_path}")
# Embed 350x350 album art into the mp3 file
embed_album_art(audio_file_path, album_cover_save_path)
print("Album art embedded into the MP3 file.")
# Fetch the synced lyrics (rich sync) using the recognized song information
lrc_lyrics = get_lyrics_from_musicxmatch(artist_name, song_title)
if lrc_lyrics:
# Write the LRC lyrics to a file
with open(lrc_file_path, 'w', encoding='utf-8') as lrc_file:
lrc_file.write(lrc_lyrics)
print(f"Saved LRC file to: {lrc_file_path}")
else:
print("Could not get the lyrics.")
else:
print("Could not identify the song.")
like get album covers for the identified song and embedd the cover to the detected song in 350x350 and save externally in 1400x1400.
|
cbfcf2d84de66bd5a95cc7aa334aeaa1
|
{
"intermediate": 0.5364744067192078,
"beginner": 0.3273508548736572,
"expert": 0.1361747831106186
}
|
41,045
|
write simple python lexer in rust lang that takes input
|
3f255c1b3bb0d3bd1c73fb41bc3d48dd
|
{
"intermediate": 0.3673665225505829,
"beginner": 0.37230244278907776,
"expert": 0.26033106446266174
}
|
41,046
|
mycode.py:
import json
import requests
from acrcloud.recognizer import ACRCloudRecognizer
from musixmatch_api import Musixmatch, CaptchaError, UserTokenError
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error
# ACR Cloud configuration (update with your credentials)
config = {
'host': ACR_HOST,
'access_key': ACR_ACCESS_KEY,
'access_secret': ACR_ACCESS_SECRET,
'timeout': 10 # seconds
}
recognizer = ACRCloudRecognizer(config)
# Initialize Musixmatch API (exception handling to be added as per your implementation)
musixmatch = Musixmatch(Exception)
# Function to recognize a song using ACRCloud
def recognize_song(audio_file_path):
buffer = open(audio_file_path, 'rb').read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result)
return result_dict['metadata']['music'][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}")
return None
def format_time(ts):
'''Converts time in seconds to the format [mm:ss.xx]'''
minutes = int(ts // 60)
seconds = int(ts % 60)
hundredths = int((ts - int(ts)) * 100)
return f'[{minutes:02d}:{seconds:02d}.{hundredths:02d}]'
def process_rich_sync_lyrics(rich_sync_lyrics_json):
'''Converts Musixmatch rich sync data to LRC format'''
lrc_lines = []
try:
# Load the JSON string into a Python object
rich_sync_data = json.loads(rich_sync_lyrics_json)
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {e}")
return None
# Iterate through each line and create formatted LRC lines
for line in rich_sync_data:
ts = format_time(line['ts']) # Start time of the line
lrc_line = f'{ts}{line["x"]}' # Use "x" for the entire line of lyrics
lrc_lines.append(lrc_line)
# Join the formatted lines with line breaks
return '\n'.join(lrc_lines)
# Function to get lyrics from Musixmatch given artist name and song title
def get_lyrics_from_musicxmatch(artist_name, song_title):
try:
user_token = musixmatch.get_user_token()
track_data = musixmatch.get_search_by_track(song_title, artist_name, "")
if track_data:
track_id = track_data['track_id']
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
# Print the JSON response for debugging
print(json.dumps(track_data, indent=2))
print(json.dumps(rich_sync_data, indent=2))
if rich_sync_data and 'richsync_body' in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data['richsync_body']
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
return lrc_lyrics
else:
print("No synced lyrics found.")
return None
else:
print("Track not found in Musixmatch.")
return None
except (CaptchaError, UserTokenError) as e:
print(f"Error while working with Musixmatch: {e}")
return None
def download_album_cover(album_cover_url, save_path, size=(1400, 1400)):
# Download the album cover image from the given URL
response = requests.get(album_cover_url, stream=True)
if response.status_code == 200:
with open(save_path, 'wb') as file:
for chunk in response:
file.write(chunk)
# Resize and save the image externally if size is specified
if size:
from PIL import Image
im = Image.open(save_path)
im = im.resize(size)
im.save(save_path)
def download_small_cover(album_cover_url, save_path, size=(350, 350)):
# If you want to save a smaller version separately
download_album_cover(album_cover_url, save_path, size)
# Note: You should check the appropriate usage rights before downloading images from the internet
def embed_album_art(audio_file_path, album_cover_path):
audio = MP3(audio_file_path, ID3=ID3)
# Add ID3 tag if it doesn't exist
try:
audio.add_tags()
except error as e:
pass
with open(album_cover_path, 'rb') as album_art:
audio.tags.add(
APIC(
encoding=3, # 3 is for utf-8
mime='image/jpeg', # image/jpeg or image/png
type=3, # 3 is for the cover image
desc=u'Cover',
data=album_art.read()
)
)
audio.save(v2_version=3)
# Note: This function assumes you have already downloaded the album cover at 'album_cover_path'
if __name__ == "__main__":
audio_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3' # Replace with actual path
lrc_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.lrc' # Output LRC file path
# Recognize the song using ACRCloud
song_tags = recognize_song(audio_file_path)
if song_tags:
artist_name = song_tags['artists'][0]['name']
song_title = song_tags['title']
print(f"Identified Song: {artist_name} - {song_title}")
# Fetch track data using the recognized song's title and artist name
track_data = musixmatch.get_search_by_track(song_title, artist_name)
if track_data:
track_id = track_data['track_id']
# Fetch the album cover URL using the track ID
album_cover_url = musixmatch.get_album_cover_url_by_track_id(track_id)
if album_cover_url:
album_cover_save_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Album_Cover.jpg'
download_album_cover(album_cover_url, album_cover_save_path) # Download the high-resolution cover
print(f"Album cover saved to: {album_cover_save_path}")
small_album_cover_save_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Small_Album_Cover.jpg'
download_small_cover(album_cover_url, small_album_cover_save_path) # For embedding
print(f"Small album cover saved to: {small_album_cover_save_path}")
# Embed 350x350 album art into the MP3 file
embed_album_art(audio_file_path, small_album_cover_save_path)
print("Album art embedded into the MP3 file.")
else:
print("Could not get album cover.")
# Fetch the synced lyrics using the recognized song's track ID
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
if rich_sync_data and 'richsync_body' in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data['richsync_body']
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
if lrc_lyrics:
# Write the LRC lyrics to a file
with open(lrc_file_path, 'w', encoding='utf-8') as lrc_file:
lrc_file.write(lrc_lyrics)
print(f"Saved LRC file to: {lrc_file_path}")
else:
print("No synced lyrics found.")
else:
print("Track not found in Musixmatch.")
else:
print("Could not identify the song.")
write musixmatchapi.py:
for gettign album, track
according to the main code
|
1c3647f7e8e90bbcdc3c30bb3e2c6e5e
|
{
"intermediate": 0.3505684435367584,
"beginner": 0.5324451327323914,
"expert": 0.11698634922504425
}
|
41,047
|
defold lua script with go.set_rotation
|
463f3251ac73f21352886a0925e7e581
|
{
"intermediate": 0.3351868689060211,
"beginner": 0.3371327519416809,
"expert": 0.3276802897453308
}
|
41,048
|
Why isn't the flask implementation of the following working? please fix:
{% extends "layout.html" %}
{% block title %}
History and Herstory
{% endblock %}
{% block main %}
<main class="container py-5 text-center">
<table class="table table-striped">
<thead>
<tr>
<th class="text-start">Symbol </th>
<th class="text-start">Shares </th>
<th class="text-start">Price </th>
<th class="text-start">Transacted </th>
</tr>
</thead>
<tbody>
{% for transaction in purchases %}
<tr>
<td class="text-start">{{ purchase.symbol }}</td>
<td class="text-end">{{ purchase.shares }}</td>
<td class="text-end">{{ purchase.price | usd }}</td>
<td class="text-end">{{ purchase.timestamp }}</td>
</tr>
{% endfor %}
</tbody>
</table>
</main>
{% endblock %}
here's app.py:
@app.route("/history", methods=["GET"])
@login_required
def history():
purchases = db.execute("""
SELECT symbol, shares, price, timestamp
FROM purchases WHERE user_id = :id;
""", id=session['user_id'])
return render_template("index.html", symbol=symbol, shares=shares, purchases=purchases, timestamp=timestamp)
And finance.db sqlite3 .schema is:
finance.db sqlite3 .schema is:
CREATE TABLE users (id INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL, username TEXT NOT NULL, hash TEXT NOT NULL, cash NUMERIC NOT NULL DEFAULT 10000.00); CREATE TABLE sqlite_sequence(name,seq); CREATE UNIQUE INDEX username ON users (username); CREATE TABLE purchases (id INTEGER PRIMARY KEY AUTOINCREMENT, user_id INTEGER NOT NULL, symbol TEXT NOT NULL, shares INTEGER NOT NULL, price REAL NOT NULL, timestamp DATETIME DEFAULT CURRENT_TIMESTAMP, FOREIGN KEY (user_id) REFERENCES users (id));
|
da1d4d0dc462c7e3894d0b578cf9b4d3
|
{
"intermediate": 0.36575204133987427,
"beginner": 0.5560438632965088,
"expert": 0.07820412516593933
}
|
41,049
|
for i in range(step):
Y_hat = conv2d(X)
l = ((Y_hat - Y) ** 2).sum()
l.backward()smys
|
aff2e889afc552551180c15aa3b484b4
|
{
"intermediate": 0.19652876257896423,
"beginner": 0.33401843905448914,
"expert": 0.469452828168869
}
|
41,050
|
hiii g[t4
|
5c704b45f204a9f17e4bece46e6b8206
|
{
"intermediate": 0.31432855129241943,
"beginner": 0.3183140456676483,
"expert": 0.36735740303993225
}
|
41,051
|
____ _ _____ _
/ __ \ | | | __ \| |
| | | |_ __ _ __ | |__ ___ _ _ ___| | | | |
| | | | '__| '_ \| '_ \ / _ \ | | / __| | | | |
| |__| | | | |_) | | | | __/ |_| \__ \ |__| | |____
\____/|_| | .__/|_| |_|\___|\__,_|___/_____/|______|
| |
|_|
its written orpheusdl
I need text like that called eurydice
|
fbd179ea6af1f0db72bb67658852ac7b
|
{
"intermediate": 0.3448420763015747,
"beginner": 0.3766873776912689,
"expert": 0.278470516204834
}
|
41,052
|
My input is 24-hour time format without a colon:mode = int(input())
def mode1():
t1 = input()
t2 = input()
time1 = t1.split()
time2 = t2.split()
h1 = int(time1[:2])
m1 = int(time1[2:])
h2 = int(time2[:2])
m2 = int(time2[2:])
hours = (h2 + m2/60) - (h1 + m1/60)
if hours > 8:
print("PASS")
else:
print("FAIL")
def mode2():
t1 = input()
time1 = t1.split()
h1 = int(time1[0] + time1[1])
m1 = int(time1[2] + time1[3])
if h1 < 24 - 17:
print(f"{h1 + 16}{m1}")
else:
print(f"{h1 + 16 - 24}{m1}")
if mode == 1:
mode1()
else:
mode2()
|
987a8a629fddbf1962a94cbc37208012
|
{
"intermediate": 0.28325191140174866,
"beginner": 0.5334824323654175,
"expert": 0.1832655668258667
}
|
41,053
|
mycode.py:
import json
import requests
from acrcloud.recognizer import ACRCloudRecognizer
from musixmatch_api import Musixmatch, CaptchaError, UserTokenError
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error
# ACRCloud API credentials
ACR_HOST = “”
ACR_ACCESS_KEY = “”
ACR_ACCESS_SECRET = “”
# ACR Cloud configuration (update with your credentials)
config = {
‘host’: ACR_HOST,
‘access_key’: ACR_ACCESS_KEY,
‘access_secret’: ACR_ACCESS_SECRET,
‘timeout’: 10 # seconds
}
recognizer = ACRCloudRecognizer(config)
# Initialize Musixmatch API (exception handling to be added as per your implementation)
musixmatch = Musixmatch(Exception)
# Function to recognize a song using ACRCloud
def recognize_song(audio_file_path):
buffer = open(audio_file_path, ‘rb’).read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result)
return result_dict[‘metadata’][‘music’][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}“)
return None
def format_time(ts):
‘’‘Converts time in seconds to the format [mm:ss.xx]’‘’
minutes = int(ts // 60)
seconds = int(ts % 60)
hundredths = int((ts - int(ts)) * 100)
return f’[{minutes:02d}:{seconds:02d}.{hundredths:02d}]‘
def process_rich_sync_lyrics(rich_sync_lyrics_json):
‘’‘Converts Musixmatch rich sync data to LRC format’’'
lrc_lines = []
try:
# Load the JSON string into a Python object
rich_sync_data = json.loads(rich_sync_lyrics_json)
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {e}”)
return None
# Iterate through each line and create formatted LRC lines
for line in rich_sync_data:
ts = format_time(line[‘ts’]) # Start time of the line
lrc_line = f’{ts}{line[“x”]}’ # Use “x” for the entire line of lyrics
lrc_lines.append(lrc_line)
# Join the formatted lines with line breaks
return ‘\n’.join(lrc_lines)
# Function to get lyrics from Musixmatch given artist name and song title
def get_lyrics_from_musicxmatch(artist_name, song_title):
try:
user_token = musixmatch.get_user_token()
track_data = musixmatch.get_search_by_track(song_title, artist_name, “”)
if track_data:
track_id = track_data[‘track_id’]
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
# Print the JSON response for debugging
print(json.dumps(track_data, indent=2))
print(json.dumps(rich_sync_data, indent=2))
if rich_sync_data and ‘richsync_body’ in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data[‘richsync_body’]
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
return lrc_lyrics
else:
print(“No synced lyrics found.”)
return None
else:
print(“Track not found in Musixmatch.”)
return None
except (CaptchaError, UserTokenError) as e:
print(f"Error while working with Musixmatch: {e}“)
return None
def download_album_cover(album_cover_url, save_path, size=(1400, 1400)):
# Download the album cover image from the given URL
response = requests.get(album_cover_url, stream=True)
if response.status_code == 200:
with open(save_path, ‘wb’) as file:
for chunk in response:
file.write(chunk)
# Resize and save the image externally if size is specified
if size:
from PIL import Image
im = Image.open(save_path)
im = im.resize(size)
im.save(save_path)
def download_small_cover(album_cover_url, save_path, size=(350, 350)):
# If you want to save a smaller version separately
download_album_cover(album_cover_url, save_path, size)
# Note: You should check the appropriate usage rights before downloading images from the internet
def embed_album_art(audio_file_path, album_cover_path):
audio = MP3(audio_file_path, ID3=ID3)
# Add ID3 tag if it doesn’t exist
try:
audio.add_tags()
except error as e:
pass
with open(album_cover_path, ‘rb’) as album_art:
audio.tags.add(
APIC(
encoding=3, # 3 is for utf-8
mime=‘image/jpeg’, # image/jpeg or image/png
type=3, # 3 is for the cover image
desc=u’Cover’,
data=album_art.read()
)
)
audio.save(v2_version=3)
# Note: This function assumes you have already downloaded the album cover at ‘album_cover_path’
if name == “main”:
audio_file_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3’ # Replace with actual path
lrc_file_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.lrc’ # Output LRC file path
# Recognize the song using ACRCloud
song_tags = recognize_song(audio_file_path)
if song_tags:
artist_name = song_tags[‘artists’][0][‘name’]
song_title = song_tags[‘title’]
print(f"Identified Song: {artist_name} - {song_title}”)
# Fetch track data using the recognized song’s title and artist name
track_data = musixmatch.get_search_by_track(song_title, artist_name)
if track_data:
track_id = track_data[‘track_id’]
# Fetch the album cover URL using the track ID
album_cover_url = musixmatch.get_album_cover_url_by_track_id(track_id)
if album_cover_url:
album_cover_save_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Album_Cover.jpg’
download_album_cover(album_cover_url, album_cover_save_path) # Download the high-resolution cover
print(f"Album cover saved to: {album_cover_save_path}“)
small_album_cover_save_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Small_Album_Cover.jpg’
download_small_cover(album_cover_url, small_album_cover_save_path) # For embedding
print(f"Small album cover saved to: {small_album_cover_save_path}”)
# Embed 350x350 album art into the MP3 file
embed_album_art(audio_file_path, small_album_cover_save_path)
print(“Album art embedded into the MP3 file.”)
else:
print(“Could not get album cover.”)
# Fetch the synced lyrics using the recognized song’s track ID
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
if rich_sync_data and ‘richsync_body’ in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data[‘richsync_body’]
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
if lrc_lyrics:
# Write the LRC lyrics to a file
with open(lrc_file_path, ‘w’, encoding=‘utf-8’) as lrc_file:
lrc_file.write(lrc_lyrics)
print(f"Saved LRC file to: {lrc_file_path}“)
else:
print(“No synced lyrics found.”)
else:
print(“Track not found in Musixmatch.”)
else:
print(“Could not identify the song.”)
modify above code and remove embeding album arts and remove lyrics from getting musixmatch.
get album covers, lyrics from apple music and save externally both of them and embed the album art to the unknown_file.mp3
interface.py:
import re
import base64
import pbkdf2
import hashlib
from Cryptodome.Hash import SHA256
from uuid import uuid4
from utils.utils import create_requests_session
from .fingerprint import Fingerprint
import srp.pysrp as srp
srp.rfc5054_enable()
srp.no_username_in_x()
def b64enc(data):
return base64.b64encode(data).decode()
def b64dec(data):
return base64.b64decode(data)
class AppleMusicApi(object):
def init(self, exception, storefront=‘US’, language=‘en-US’, lyrics_resource=‘lyrics’):
self.s = create_requests_session()
self.api_base = ‘https://amp-api.music.apple.com/v1/’
self.storefront = storefront
self.language = language
self.lyrics_storefront = storefront
self.lyrics_language = language
self.lyrics_resource = lyrics_resource
self.access_token = ‘’
self.user_token = ‘’
self.exception = exception
self.user_agent = ‘Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.66 Safari/537.36’
def headers(self):
return {
‘authorization’: ‘Bearer ’ + self.access_token,
‘Connection’: ‘Keep-Alive’,
‘Content-Type’: ‘application/json’,
‘Origin’: ‘https://music.apple.com’,
‘Referer’: ‘https://music.apple.com/’,
‘Accept-Encoding’: ‘gzip, deflate’,
‘Accept-Language’: f’{self.language},en;q=0.9’,
‘User-Agent’: self.user_agent,
‘Media-User-Token’: self.user_token,
‘x-apple-renewal’: ‘true’
}
def get_access_token(self):
s = create_requests_session()
r = s.get(‘https://music.apple.com/us/search’, headers=self.headers())
if r.status_code != 200: raise self.exception(r.text)
index_js = re.search(‘(?<=index-)(.?)(?=.js")‘, r.text).group(1)
r = s.get(f’https://music.apple.com/assets/index-{index_js}.js’, headers=self.headers())
if r.status_code != 200: raise self.exception(r.text)
self.access_token = re.search('(?=eyJh)(.?)(?=")’, r.text).group(1)
return self.access_token
def auth(self, email: str, password: str):
auth_url = ‘https://idmsa.apple.com/appleauth/’
client_id = ‘06f8d74b71c73757a2f82158d5e948ae7bae11ec45fda9a58690f55e35945c51’
frame_id = ‘auth-’ + str(uuid4()).lower()
# get “dslang”, “site” and “aasp” cookies
r = self.s.get(auth_url + ‘auth/authorize/signin’, headers=self.headers(), params={
‘frame_id’: frame_id,
‘language’: ‘en_us’,
‘skVersion’: ‘7’,
‘iframeId’: frame_id,
‘client_id’: client_id,
‘redirect_uri’: ‘https://music.apple.com’,
‘response_type’: ‘code’,
‘response_mode’: ‘web_message’,
‘account_ind’: ‘1’,
‘state’: frame_id,
‘authVersion’: ‘latest’
})
if r.status_code != 200: raise self.exception(r.text)
auth_attributes = r.headers[‘X-Apple-Auth-Attributes’]
# get “aa” cookie
r = self.s.post(auth_url + ‘jslog’, headers=self.headers(), json={
‘type’: ‘INFO’,
‘title’: ‘AppleAuthPerf-s-y’,
‘message’: ‘’‘APPLE ID : TTI {“data”:{“initApp”:{“startTime”:1154.2000000001863},“loadAuthComponent”:{“startTime”:1500.7000000001863},“startAppToTTI”:{“duration”:346.70000000018626}},“order”:[“initApp”,“loadAuthComponent”,“startAppToTTI”]}’‘’,
‘iframeId’: frame_id,
‘details’: ‘’‘{“pageVisibilityState”:“visible”}’‘’
})
assert (r.status_code == 200)
# actual login
headers = {
‘Accept’: ‘application/json’,
‘Referer’: ‘https://idmsa.apple.com/’,
‘Content-Type’: ‘application/json’,
‘X-Apple-Widget-Key’: client_id,
‘X-Apple-Frame-Id’: frame_id,
‘X-Apple-Domain-Id’: ‘3’,
‘X-Apple-Locale’: ‘en_us’,
‘X-Requested-With’: ‘XMLHttpRequest’,
‘Origin’: ‘https://idmsa.apple.com’,
‘X-Apple-I-Require-UE’: ‘true’,
‘X-Apple-I-FD-Client-Info’: ‘{’ + f’“U”:“{self.user_agent}”,“L”:“{self.language}”,“Z”:“GMT-8:00”,“V”:“1.1”,“F”:“{Fingerprint().create_fingerprint()}”’ + ‘}’,
‘X-Apple-Auth-Attributes’: auth_attributes,
‘User-Agent’: self.user_agent,
‘X-Apple-Mandate-Security-Upgrade’: ‘0’
}
json = {‘accountName’: email, ‘rememberMe’: ‘false’}
params_ = {‘isRememberMeEnabled’: ‘false’}
r = self.s.post(auth_url + ‘auth/federate’, headers=headers, params=params_, json=json_)
if ‘federated’ not in r.json(): raise self.exception(r.text)
# finally begin login
user = srp.User(email, bytes(), hash_alg=srp.SHA256, ng_type=srp.NG_2048)
, A = user.start_authentication()
json = {‘a’: b64enc(A), ‘accountName’: email, ‘protocols’: [‘s2k’, ‘s2k_fo’]}
r = self.s.post(auth_url + ‘auth/signin/init’, headers=headers, json=json_)
out_json = r.json()
if r.status_code != 200: raise self.exception(out_json[‘serviceErrors’][0][‘message’])
if ‘b’ not in out_json: raise self.exception(r.text)
if out_json.get(‘protocol’) != ‘s2k’: raise self.exception(‘Protocol not supported’)
salt = b64dec(out_json[‘salt’])
iterations = out_json[‘iteration’]
B = b64dec(out_json[‘b’])
c = out_json[‘c’]
pass_hash = hashlib.sha256(password.encode(“utf-8”)).digest()
enc_pass = pbkdf2.PBKDF2(pass_hash, salt, iterations, SHA256).read(32)
user.p = enc_pass
M1 = user.process_challenge(salt, B)
if M1 is None: raise self.exception(“Failed to process challenge”)
M2 = user.K
# real version uses m2 as well… hmmm
json_ = {‘accountName’: email, ‘c’: c, ‘m1’: b64enc(M1), ‘m2’: b64enc(M2), ‘rememberMe’: ‘false’}
r = self.s.post(auth_url + ‘auth/signin/complete’, headers=headers, params=params_, json=json_)
if r.status_code != 200: raise self.exception(r.json()[‘serviceErrors’][0][‘message’])
# exchange the “myacinfo” cookie with the “media-user-token”
r = self.s.post(‘https://buy.music.apple.com/account/web/auth’, headers=self.headers(), json={‘webAuthorizationFlowContext’: ‘music’})
if r.status_code != 200: raise self.exception(r.text)
self.user_token = self.s.cookies[‘media-user-token’]
return self.user_token
def get_account_details(self, force_region, selected_language, lyrics_language):
r = self.s.get(self.api_base + ‘me/account’, headers=self.headers(), params={‘meta’: ‘subscription’})
if r.status_code != 200: raise self.exception(r.text)
self.lyrics_storefront = r.json()[‘meta’][‘subscription’][‘storefront’]
if force_region.lower() == self.lyrics_storefront: force_region = None
if force_region: print(f"Apple Music: WARNING: Selected region {force_region} is not the same as your Apple Music region {self.lyrics_storefront}, lyrics will use the region {self.lyrics_storefront}. Only lyrics available in both regions will be used, maybe use a copy of the module with the folder name (which determines the name of the module) and the netlocation_constant changed for lyrics only if you want credits or playlists from other regions.”)
self.storefront = force_region.lower() if force_region else self.lyrics_storefront
account_active = r.json()[‘meta’][‘subscription’][‘active’]
storefront_endpoint = f’storefronts/{force_region.lower()}’ if force_region else ‘me/storefront’
endpoint_data = self.s.get(self.api_base + storefront_endpoint, headers=self.headers())
if endpoint_data.status_code != 200: raise self.exception(f’Region {force_region} is not supported’)
supported_languages = endpoint_data.json()[‘data’][0][‘attributes’][‘supportedLanguageTags’]
if selected_language:
for i in supported_languages:
if selected_language in i:
self.language = i
break
else:
print(f"Apple Music: WARNING: Selected language {selected_language} in region {force_region if force_region else self.lyrics_storefront} is unsupported, force a different region or use one of these: {‘, ‘.join(supported_languages)}")
self.language = supported_languages[0]
else:
self.language = supported_languages[0]
if not lyrics_language: lyrics_language = selected_language
if force_region:
supported_languages = self.s.get(f’{self.api_base}me/storefront’, headers=self.headers()).json()[‘data’][0][‘attributes’][‘supportedLanguageTags’]
if lyrics_language:
for i in supported_languages:
if selected_language in i:
self.lyrics_language = i
break
else:
print(f"Apple Music: WARNING: Selected language {selected_language} in lyrics region {self.lyrics_storefront} is unsupported, force a different region or use one of these: {‘, ‘.join(supported_languages)}")
self.lyrics_language = supported_languages[0]
else:
self.lyrics_language = supported_languages[0]
return self.storefront, account_active, self.language, self.lyrics_language, self.lyrics_storefront
def check_active_subscription(self):
url = f’{self.api_base}me/account’
params = {‘meta’: ‘subscription’, ‘challenge[subscriptionCapabilities]’: ‘voice,premium’}
response = self.s.get(url, headers=self.headers(), params=params)
if response.status_code != 200: raise self.exception(response.text)
response_data = response.json()
if ‘meta’ in response_data and ‘subscription’ in response_data[‘meta’]:
return response_data[‘meta’][‘subscription’].get(‘active’, False)
return False
def get(self, url: str, params=None, storefront=None, language=None):
if not params: params = {}
if not storefront: storefront = self.storefront
params[‘l’] = language if language else self.language
r = self.s.get(f’{self.api_base}catalog/{storefront}/{url}‘, params=params, headers=self.headers())
if r.status_code not in [200, 201, 202]: raise self.exception(r.text)
return r.json()
def search(self, query_type: str, query: str, limit: int = 10):
if limit > 25: limit = 25
params = {
‘term’: query,
‘types’: query_type,
‘limit’: limit
}
if query_type == ‘songs’:
params[‘extend[songs]’] = ‘attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’
params[‘include[songs]’] = ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’) # doesn’t give lyrics?
params[‘extend[albums]’] = ‘copyright,upc’
elif query_type == ‘playlists’:
params[‘include[playlists]’] = ‘curator’
params[‘extend[playlists]’] = ‘artwork,description,trackTypes,trackCount’
results = self.get(‘search’, params)[‘results’]
if query_type in results:
results = results[query_type][‘data’]
else:
results = []
return results
def get_playlist_base_data(self, playlist_id):
return self.get(f’playlists/{playlist_id}‘, params={
‘include’: ‘curator,tracks’,
‘extend’: ‘artwork,description,trackTypes,trackCount’,
‘include[songs]’: ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’),
‘extend[songs]’: ‘extendedAssetUrls,attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’,
‘extend[albums]’: ‘copyright,upc’
})[‘data’][0]
def get_playlist_tracks(self, playlist_data):
tracks_list, track_data = [], {}
tracks = list(playlist_data[‘relationships’][‘tracks’][‘data’])
offset = len(tracks)
while len(tracks) + offset <= playlist_data[‘attributes’][‘trackCount’]:
tracks += self.get(f’playlists/{playlist_data[“id”]}/tracks’, params={
‘offset’: offset,
‘include[songs]’: ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’),
‘extend[songs]’: ‘extendedAssetUrls,attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’,
‘extend[albums]’: ‘copyright,upc’,
‘limit’: 100
})[‘data’]
offset += 100
for track in tracks:
tracks_list.append(track[‘id’])
track_data[track[‘id’]] = track
return tracks_list, track_data
def get_tracks_by_ids(self, track_ids: list = None, isrc: str = None):
if not track_ids: track_ids = []
params = {‘filter[isrc]’: isrc} if isrc else {‘ids’: ‘,’.join(track_ids)}
params[‘include’] = ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’)
params[‘extend’] = ‘attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’
params[‘extend[albums]’] = ‘copyright,upc’
return self.get(‘songs’, params)[‘data’]
def get_track(self, track_id: str = None):
return self.get_tracks_by_ids([track_id])[0]
@staticmethod
def get_lyrics_support(track_attributes):
# could technically be a single line in the lambda
if track_attributes.get(‘hasTimeSyncedLyrics’):
return 1 if track_attributes.get(‘isVocalAttenuationAllowed’) else 2
else:
return 3 if track_attributes.get(‘hasLyrics’) else 4
def get_track_by_isrc(self, isrc: str, album_name: str):
results = self.get_tracks_by_ids(isrc=isrc)
correct_region_results = [i for i in results if i[‘attributes’][‘url’].split(‘i=’)[-1].split(‘&’)[0] == i[‘id’]]
incorrect_region_results = [i for i in results if i[‘attributes’][‘url’].split(‘i=’)[-1].split(‘&’)[0] != i[‘id’]]
correct_region_results_sorted_by_track_number = sorted(correct_region_results, key=lambda x: x[‘attributes’].get(‘trackNumber’, 1))
fix_results_by_album = lambda list_to_sort: sorted(list_to_sort, key=lambda x: (x[‘attributes’][‘albumName’] != album_name))
correct_album_correct_region_results = fix_results_by_album(correct_region_results_sorted_by_track_number)
correct_album_incorrect_region_results = fix_results_by_album(incorrect_region_results)
correct_album_prioritised_lyrics_results = sorted(correct_album_correct_region_results, key=lambda x: self.get_lyrics_support(x[‘attributes’]))
return correct_album_prioritised_lyrics_results + correct_album_incorrect_region_results
def get_lyrics(self, track_id, lyrics_resource=None):
if not lyrics_resource: lyrics_resource = self.lyrics_resource
try:
data = self.get(f’songs/{track_id}/{lyrics_resource}‘, storefront=self.lyrics_storefront, language=self.language)
except self.exception:
return None
return data#[‘data’][0][‘attributes’][‘ttml’]
fingerprint.py:
# This is likely not necessary at all, but I (OrfiDev) decided to reverse engineer and
# reimplement the fingerprinting algorithm used by Apple’s web login as used by Apple Music anyways.
#
# I’m not sure if this is reversible (as in even checkable if it’s correct)
# maybe the part which I assumed to be a checksum is actually a way to derive some variable required to decode?
import pytz
import random
import datetime
import urllib.parse
timezone = pytz.timezone(‘America/Los_Angeles’)
class Fingerprint:
def encode(cls, e):
y = [“%20”, “;;;”, “%3B”, “%2C”, “und”, “fin”, “ed;”, “%28”, “%29”, “%3A”, “/53”, “ike”, “Web”, “0;”, “.0”, “e;”, “on”, “il”, “ck”, “01”, “in”, “Mo”, “fa”, “00”, “32”, “la”, “.1”, “ri”, “it”, “%u”, “le”]
A = “.0123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ_abcdefghijklmnopqrstuvwxyz”
w = {
1: [4, 15],
110: [8, 239],
74: [8, 238],
57: [7, 118],
56: [7, 117],
71: [8, 233],
25: [8, 232],
101: [5, 28],
104: [7, 111],
4: [7, 110],
105: [6, 54],
5: [7, 107],
109: [7, 106],
103: [9, 423],
82: [9, 422],
26: [8, 210],
6: [7, 104],
46: [6, 51],
97: [6, 50],
111: [6, 49],
7: [7, 97],
45: [7, 96],
59: [5, 23],
15: [7, 91],
11: [8, 181],
72: [8, 180],
27: [8, 179],
28: [8, 178],
16: [7, 88],
88: [10, 703],
113: [11, 1405],
89: [12, 2809],
107: [13, 5617],
90: [14, 11233],
42: [15, 22465],
64: [16, 44929],
0: [16, 44928],
81: [9, 350],
29: [8, 174],
118: [8, 173],
30: [8, 172],
98: [8, 171],
12: [8, 170],
99: [7, 84],
117: [6, 41],
112: [6, 40],
102: [9, 319],
68: [9, 318],
31: [8, 158],
100: [7, 78],
84: [6, 38],
55: [6, 37],
17: [7, 73],
8: [7, 72],
9: [7, 71],
77: [7, 70],
18: [7, 69],
65: [7, 68],
48: [6, 33],
116: [6, 32],
10: [7, 63],
121: [8, 125],
78: [8, 124],
80: [7, 61],
69: [7, 60],
119: [7, 59],
13: [8, 117],
79: [8, 116],
19: [7, 57],
67: [7, 56],
114: [6, 27],
83: [6, 26],
115: [6, 25],
14: [6, 24],
122: [8, 95],
95: [8, 94],
76: [7, 46],
24: [7, 45],
37: [7, 44],
50: [5, 10],
51: [5, 9],
108: [6, 17],
22: [7, 33],
120: [8, 65],
66: [8, 64],
21: [7, 31],
106: [7, 30],
47: [6, 14],
53: [5, 6],
49: [5, 5],
86: [8, 39],
85: [8, 38],
23: [7, 18],
75: [7, 17],
20: [7, 16],
2: [5, 3],
73: [8, 23],
43: [9, 45],
87: [9, 44],
70: [7, 10],
3: [6, 4],
52: [5, 1],
54: [5, 0]
}
# the actual encoding function
def main_encode(e):
def t(r, o, input_tuple, n):
shift, value = input_tuple
r = (r << shift) | value
o += shift
while o >= 6:
e = (r >> (o - 6)) & 63
n += A[e]
r ^= e << (o - 6)
o -= 6
return n, r, o
n, r, o = “”, 0, 0
n, r, o = t(r, o, (6, (7 & len(e)) << 3 | 0), n)
n, r, o = t(r, o, (6, 56 & len(e) | 1), n)
for char in e:
char_code = ord(char)
if char_code not in w:
return “”
n, r, o = t(r, o, w[char_code], n)
n, r, o = t(r, o, w[0], n)
if o > 0:
n, r, o = t(r, o, (6 - o, 0), n)
return n
# replacing some stuff in the string?
n = e
for r, rep in enumerate(y):
n = n.replace(rep, chr(r + 1))
# checksum calculation I think
n_val = 65535
for char in e:
n_val = ((n_val >> 8) | (n_val << 8)) & 65535
n_val ^= 255 & ord(char)
n_val ^= (255 & n_val) >> 4
n_val ^= (n_val << 12) & 65535
n_val ^= ((255 & n_val) << 5) & 65535
n_val &= 65535
n_val &= 65535
checksum = A[n_val >> 12] + A[(n_val >> 6) & 63] + A[n_val & 63]
# adding checksum to the encoded string
return main_encode(n) + checksum
def generate(cls):
def get_timezone_offset(date):
local_time = timezone.localize(date)
return int(-local_time.utcoffset().total_seconds() / 60)
t1 = get_timezone_offset(datetime.datetime(2005, 1, 15))
t2 = get_timezone_offset(datetime.datetime(2005, 7, 15))
def base_is_dst():
return abs(t1 - t2) != 0
def base_is_dst_str():
return str(base_is_dst()).lower()
def is_dst(date):
return base_is_dst and get_timezone_offset(date) == min(t1, t2)
def is_dst_str(date):
return str(is_dst(date)).lower()
def calculate_offset(date):
return int(-(get_timezone_offset(date) + abs(t2 - t1) * is_dst(date)) / 60)
# technically not the same as the browser, but close enough
def get_locale_string(date):
return urllib.parse.quote(date.strftime(“%m/%d/%Y, %I:%M:%S %p”))
def get_timestamp(date):
return int(date.timestamp() * 1000)
current_time = datetime.datetime.now()
return f’TF1;020;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;{base_is_dst_str()};{is_dst_str(current_time)};{get_timestamp(current_time)};{calculate_offset(current_time)};{get_locale_string(datetime.datetime(2005,6,7,21,33,44,888))};;;;;;;;;{random.randint(1000, 9999)};{t1};{t2};{get_locale_string(current_time)};;;;;;;;;;;;;;;;;;;;;;;;25;;;;;;;;;;;;;;;5.6.1-0;;’
def create_fingerprint(cls):
return cls.encode(cls.generate())
# all the garbage that is tracked for fingerprinting if you’re curious
‘’‘
var t = new Date
, r = new Date
, o = [u(“TF1”), u(“020”), function() {
return ScriptEngineMajorVersion()
}
, function() {
return ScriptEngineMinorVersion()
}
, function() {
return ScriptEngineBuildVersion()
}
, function() {
return c(“{7790769C-0471-11D2-AF11-00C04FA35D02}”)
}
, function() {
return c(“{89820200-ECBD-11CF-8B85-00AA005B4340}”)
}
, function() {
return c(“{283807B5-2C60-11D0-A31D-00AA00B92C03}”)
}
, function() {
return c(“{4F216970-C90C-11D1-B5C7-0000F8051515}”)
}
, function() {
return c(“{44BBA848-CC51-11CF-AAFA-00AA00B6015C}”)
}
, function() {
return c(“{9381D8F2-0288-11D0-9501-00AA00B911A5}”)
}
, function() {
return c(“{4F216970-C90C-11D1-B5C7-0000F8051515}”)
}
, function() {
return c(“{5A8D6EE0-3E18-11D0-821E-444553540000}”)
}
, function() {
return c(“{89820200-ECBD-11CF-8B85-00AA005B4383}”)
}
, function() {
return c(“{08B0E5C0-4FCB-11CF-AAA5-00401C608555}”)
}
, function() {
return c(“{45EA75A0-A269-11D1-B5BF-0000F8051515}”)
}
, function() {
return c(“{DE5AED00-A4BF-11D1-9948-00C04F98BBC9}”)
}
, function() {
return c(“{22D6F312-B0F6-11D0-94AB-0080C74C7E95}”)
}
, function() {
return c(“{44BBA842-CC51-11CF-AAFA-00AA00B6015B}”)
}
, function() {
return c(“{3AF36230-A269-11D1-B5BF-0000F8051515}”)
}
, function() {
return c(“{44BBA840-CC51-11CF-AAFA-00AA00B6015C}”)
}
, function() {
return c(“{CC2A9BA0-3BDD-11D0-821E-444553540000}”)
}
, function() {
return c(“{08B0E5C0-4FCB-11CF-AAA5-00401C608500}”)
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return s([“navigator.productSub”, “navigator.appMinorVersion”])
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return s([“navigator.oscpu”, “navigator.cpuClass”])
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return s([“navigator.language”, “navigator.userLanguage”])
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return 0 !== Math.abs(h - g)
}
, function() {
return a(t)
}
, function() {
return “@UTC@”
}
, function() {
var e = 0;
return e = 0,
a(t) && (e = Math.abs(h - g)),
-(t.getTimezoneOffset() + e) / 60
}
, function() {
return new Date(2005,5,7,21,33,44,888).toLocaleString()
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return v.Acrobat
}
, function() {
return v.Flash
}
, function() {
return v.QuickTime
}
, function() {
return v[“Java Plug-in”]
}
, function() {
return v.Director
}
, function() {
return v.Office
}
, function() {
return “@CT@”
}
, function() {
return h
}
, function() {
return g
}
, function() {
return t.toLocaleString()
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return “”
}
, function() {
return n(“Acrobat”)
}
, function() {
return n(“Adobe SVG”)
}
, function() {
return n(“Authorware”)
}
, function() {
return n(“Citrix ICA”)
}
, function() {
return n(“Director”)
}
, function() {
return n(“Flash”)
}
, function() {
return n(“MapGuide”)
}
, function() {
return n(“MetaStream”)
}
, function() {
return n(“PDFViewer”)
}
, function() {
return n(“QuickTime”)
}
, function() {
return n(“RealOne”)
}
, function() {
return n(“RealPlayer Enterprise”)
}
, function() {
return n(“RealPlayer Plugin”)
}
, function() {
return n(“Seagate Software Report”)
}
, function() {
return n(“Silverlight”)
}
, function() {
return n(“Windows Media”)
}
, function() {
return n(“iPIX”)
}
, function() {
return n(“nppdf.so”)
}
, function() {
var e = document.createElement(“span”);
e.innerHTML = " ",
e.style.position = “absolute”,
e.style.left = “-9999px”,
document.body.appendChild(e);
var t = e.offsetHeight;
return document.body.removeChild(e),
t
}
, m(), m(), m(), m(), m(), m(), m(), m(), m(), m(), m(), m(), m(), m(), function() {
return “5.6.1-0”
}
, m()];
’‘’
applemusic_api.py:
import re
import base64
import pbkdf2
import hashlib
from Cryptodome.Hash import SHA256
from uuid import uuid4
from utils.utils import create_requests_session
from .fingerprint import Fingerprint
import srp.pysrp as srp
srp.rfc5054_enable()
srp.no_username_in_x()
def b64enc(data):
return base64.b64encode(data).decode()
def b64dec(data):
return base64.b64decode(data)
class AppleMusicApi(object):
def init(self, exception, storefront=‘US’, language=‘en-US’, lyrics_resource=‘lyrics’):
self.s = create_requests_session()
self.api_base = ‘https://amp-api.music.apple.com/v1/’
self.storefront = storefront
self.language = language
self.lyrics_storefront = storefront
self.lyrics_language = language
self.lyrics_resource = lyrics_resource
self.access_token = ‘’
self.user_token = ‘’
self.exception = exception
self.user_agent = ‘Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/103.0.5060.66 Safari/537.36’
def headers(self):
return {
‘authorization’: ‘Bearer ’ + self.access_token,
‘Connection’: ‘Keep-Alive’,
‘Content-Type’: ‘application/json’,
‘Origin’: ‘https://music.apple.com’,
‘Referer’: ‘https://music.apple.com/’,
‘Accept-Encoding’: ‘gzip, deflate’,
‘Accept-Language’: f’{self.language},en;q=0.9’,
‘User-Agent’: self.user_agent,
‘Media-User-Token’: self.user_token,
‘x-apple-renewal’: ‘true’
}
def get_access_token(self):
s = create_requests_session()
r = s.get(‘https://music.apple.com/us/search’, headers=self.headers())
if r.status_code != 200: raise self.exception(r.text)
index_js = re.search(‘(?<=index-)(.?)(?=.js")‘, r.text).group(1)
r = s.get(f’https://music.apple.com/assets/index-{index_js}.js’, headers=self.headers())
if r.status_code != 200: raise self.exception(r.text)
self.access_token = re.search('(?=eyJh)(.?)(?=")’, r.text).group(1)
return self.access_token
def auth(self, email: str, password: str):
auth_url = ‘https://idmsa.apple.com/appleauth/’
client_id = ‘06f8d74b71c73757a2f82158d5e948ae7bae11ec45fda9a58690f55e35945c51’
frame_id = ‘auth-’ + str(uuid4()).lower()
# get “dslang”, “site” and “aasp” cookies
r = self.s.get(auth_url + ‘auth/authorize/signin’, headers=self.headers(), params={
‘frame_id’: frame_id,
‘language’: ‘en_us’,
‘skVersion’: ‘7’,
‘iframeId’: frame_id,
‘client_id’: client_id,
‘redirect_uri’: ‘https://music.apple.com’,
‘response_type’: ‘code’,
‘response_mode’: ‘web_message’,
‘account_ind’: ‘1’,
‘state’: frame_id,
‘authVersion’: ‘latest’
})
if r.status_code != 200: raise self.exception(r.text)
auth_attributes = r.headers[‘X-Apple-Auth-Attributes’]
# get “aa” cookie
r = self.s.post(auth_url + ‘jslog’, headers=self.headers(), json={
‘type’: ‘INFO’,
‘title’: ‘AppleAuthPerf-s-y’,
‘message’: ‘’‘APPLE ID : TTI {“data”:{“initApp”:{“startTime”:1154.2000000001863},“loadAuthComponent”:{“startTime”:1500.7000000001863},“startAppToTTI”:{“duration”:346.70000000018626}},“order”:[“initApp”,“loadAuthComponent”,“startAppToTTI”]}’‘’,
‘iframeId’: frame_id,
‘details’: ‘’‘{“pageVisibilityState”:“visible”}’‘’
})
assert (r.status_code == 200)
# actual login
headers = {
‘Accept’: ‘application/json’,
‘Referer’: ‘https://idmsa.apple.com/’,
‘Content-Type’: ‘application/json’,
‘X-Apple-Widget-Key’: client_id,
‘X-Apple-Frame-Id’: frame_id,
‘X-Apple-Domain-Id’: ‘3’,
‘X-Apple-Locale’: ‘en_us’,
‘X-Requested-With’: ‘XMLHttpRequest’,
‘Origin’: ‘https://idmsa.apple.com’,
‘X-Apple-I-Require-UE’: ‘true’,
‘X-Apple-I-FD-Client-Info’: ‘{’ + f’“U”:“{self.user_agent}”,“L”:“{self.language}”,“Z”:“GMT-8:00”,“V”:“1.1”,“F”:“{Fingerprint().create_fingerprint()}”’ + ‘}’,
‘X-Apple-Auth-Attributes’: auth_attributes,
‘User-Agent’: self.user_agent,
‘X-Apple-Mandate-Security-Upgrade’: ‘0’
}
json = {‘accountName’: email, ‘rememberMe’: ‘false’}
params = {‘isRememberMeEnabled’: ‘false’}
r = self.s.post(auth_url + ‘auth/federate’, headers=headers, params=params, json=json)
if ‘federated’ not in r.json(): raise self.exception(r.text)
# finally begin login
user = srp.User(email, bytes(), hash_alg=srp.SHA256, ng_type=srp.NG_2048)
, A = user.start_authentication()
json = {‘a’: b64enc(A), ‘accountName’: email, ‘protocols’: [‘s2k’, ‘s2k_fo’]}
r = self.s.post(auth_url + ‘auth/signin/init’, headers=headers, json=json)
out_json = r.json()
if r.status_code != 200: raise self.exception(out_json[‘serviceErrors’][0][‘message’])
if ‘b’ not in out_json: raise self.exception(r.text)
if out_json.get(‘protocol’) != ‘s2k’: raise self.exception(‘Protocol not supported’)
salt = b64dec(out_json[‘salt’])
iterations = out_json[‘iteration’]
B = b64dec(out_json[‘b’])
c = out_json[‘c’]
pass_hash = hashlib.sha256(password.encode(“utf-8”)).digest()
enc_pass = pbkdf2.PBKDF2(pass_hash, salt, iterations, SHA256).read(32)
user.p = enc_pass
M1 = user.process_challenge(salt, B)
if M1 is None: raise self.exception(“Failed to process challenge”)
M2 = user.K
# real version uses m2 as well… hmmm
json = {‘accountName’: email, ‘c’: c, ‘m1’: b64enc(M1), ‘m2’: b64enc(M2), ‘rememberMe’: ‘false’}
r = self.s.post(auth_url + ‘auth/signin/complete’, headers=headers, params=params, json=json_)
if r.status_code != 200: raise self.exception(r.json()[‘serviceErrors’][0][‘message’])
# exchange the “myacinfo” cookie with the “media-user-token”
r = self.s.post(‘https://buy.music.apple.com/account/web/auth’, headers=self.headers(), json={‘webAuthorizationFlowContext’: ‘music’})
if r.status_code != 200: raise self.exception(r.text)
self.user_token = self.s.cookies[‘media-user-token’]
return self.user_token
def get_account_details(self, force_region, selected_language, lyrics_language):
r = self.s.get(self.api_base + ‘me/account’, headers=self.headers(), params={‘meta’: ‘subscription’})
if r.status_code != 200: raise self.exception(r.text)
self.lyrics_storefront = r.json()[‘meta’][‘subscription’][‘storefront’]
if force_region.lower() == self.lyrics_storefront: force_region = None
if force_region: print(f"Apple Music: WARNING: Selected region {force_region} is not the same as your Apple Music region {self.lyrics_storefront}, lyrics will use the region {self.lyrics_storefront}. Only lyrics available in both regions will be used, maybe use a copy of the module with the folder name (which determines the name of the module) and the netlocation_constant changed for lyrics only if you want credits or playlists from other regions.“)
self.storefront = force_region.lower() if force_region else self.lyrics_storefront
account_active = r.json()[‘meta’][‘subscription’][‘active’]
storefront_endpoint = f’storefronts/{force_region.lower()}’ if force_region else ‘me/storefront’
endpoint_data = self.s.get(self.api_base + storefront_endpoint, headers=self.headers())
if endpoint_data.status_code != 200: raise self.exception(f’Region {force_region} is not supported’)
supported_languages = endpoint_data.json()[‘data’][0][‘attributes’][‘supportedLanguageTags’]
if selected_language:
for i in supported_languages:
if selected_language in i:
self.language = i
break
else:
print(f"Apple Music: WARNING: Selected language {selected_language} in region {force_region if force_region else self.lyrics_storefront} is unsupported, force a different region or use one of these: {', '.join(supported_languages)}”)
self.language = supported_languages[0]
else:
self.language = supported_languages[0]
if not lyrics_language: lyrics_language = selected_language
if force_region:
supported_languages = self.s.get(f’{self.api_base}me/storefront’, headers=self.headers()).json()[‘data’][0][‘attributes’][‘supportedLanguageTags’]
if lyrics_language:
for i in supported_languages:
if selected_language in i:
self.lyrics_language = i
break
else:
print(f"Apple Music: WARNING: Selected language {selected_language} in lyrics region {self.lyrics_storefront} is unsupported, force a different region or use one of these: {‘, ‘.join(supported_languages)}")
self.lyrics_language = supported_languages[0]
else:
self.lyrics_language = supported_languages[0]
return self.storefront, account_active, self.language, self.lyrics_language, self.lyrics_storefront
def check_active_subscription(self):
url = f’{self.api_base}me/account’
params = {‘meta’: ‘subscription’, ‘challenge[subscriptionCapabilities]’: ‘voice,premium’}
response = self.s.get(url, headers=self.headers(), params=params)
if response.status_code != 200: raise self.exception(response.text)
response_data = response.json()
if ‘meta’ in response_data and ‘subscription’ in response_data[‘meta’]:
return response_data[‘meta’][‘subscription’].get(‘active’, False)
return False
def _get(self, url: str, params=None, storefront=None, language=None):
if not params: params = {}
if not storefront: storefront = self.storefront
params[‘l’] = language if language else self.language
r = self.s.get(f’{self.api_base}catalog/{storefront}/{url}‘, params=params, headers=self.headers())
if r.status_code not in [200, 201, 202]: raise self.exception(r.text)
return r.json()
def search(self, query_type: str, query: str, limit: int = 10):
if limit > 25: limit = 25
params = {
‘term’: query,
‘types’: query_type,
‘limit’: limit
}
if query_type == ‘songs’:
params[‘extend[songs]’] = ‘attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’
params[‘include[songs]’] = ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’) # doesn’t give lyrics?
params[‘extend[albums]’] = ‘copyright,upc’
elif query_type == ‘playlists’:
params[‘include[playlists]’] = ‘curator’
params[‘extend[playlists]’] = ‘artwork,description,trackTypes,trackCount’
results = self._get(‘search’, params)[‘results’]
if query_type in results:
results = results[query_type][‘data’]
else:
results = []
return results
def get_playlist_base_data(self, playlist_id):
return self._get(f’playlists/{playlist_id}‘, params={
‘include’: ‘curator,tracks’,
‘extend’: ‘artwork,description,trackTypes,trackCount’,
‘include[songs]’: ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’),
‘extend[songs]’: ‘extendedAssetUrls,attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’,
‘extend[albums]’: ‘copyright,upc’
})[‘data’][0]
def get_playlist_tracks(self, playlist_data):
tracks_list, track_data = [], {}
tracks = list(playlist_data[‘relationships’][‘tracks’][‘data’])
offset = len(tracks)
while len(tracks) + offset <= playlist_data[‘attributes’][‘trackCount’]:
tracks += self._get(f’playlists/{playlist_data[“id”]}/tracks’, params={
‘offset’: offset,
‘include[songs]’: ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’),
‘extend[songs]’: ‘extendedAssetUrls,attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’,
‘extend[albums]’: ‘copyright,upc’,
‘limit’: 100
})[‘data’]
offset += 100
for track in tracks:
tracks_list.append(track[‘id’])
track_data[track[‘id’]] = track
return tracks_list, track_data
def get_tracks_by_ids(self, track_ids: list = None, isrc: str = None):
if not track_ids: track_ids = []
params = {‘filter[isrc]’: isrc} if isrc else {‘ids’: ‘,’.join(track_ids)}
params[‘include’] = ‘artists,albums’ + (f’,{self.lyrics_resource}’ if self.storefront == self.lyrics_storefront else ‘’)
params[‘extend’] = ‘attribution,composerName,contentRating,discNumber,durationInMillis,isrc,movementCount,movementName,movementNumber,releaseDate,trackNumber,workNamedata’
params[‘extend[albums]’] = ‘copyright,upc’
return self._get(‘songs’, params)[‘data’]
def get_track(self, track_id: str = None):
return self.get_tracks_by_ids([track_id])[0]
@staticmethod
def get_lyrics_support(track_attributes):
# could technically be a single line in the lambda
if track_attributes.get(‘hasTimeSyncedLyrics’):
return 1 if track_attributes.get(‘isVocalAttenuationAllowed’) else 2
else:
return 3 if track_attributes.get(‘hasLyrics’) else 4
def get_track_by_isrc(self, isrc: str, album_name: str):
results = self.get_tracks_by_ids(isrc=isrc)
correct_region_results = [i for i in results if i[‘attributes’][‘url’].split(‘i=’)[-1].split(‘&’)[0] == i[‘id’]]
incorrect_region_results = [i for i in results if i[‘attributes’][‘url’].split(‘i=’)[-1].split(‘&’)[0] != i[‘id’]]
correct_region_results_sorted_by_track_number = sorted(correct_region_results, key=lambda x: x[‘attributes’].get(‘trackNumber’, 1))
fix_results_by_album = lambda list_to_sort: sorted(list_to_sort, key=lambda x: (x[‘attributes’][‘albumName’] != album_name))
correct_album_correct_region_results = fix_results_by_album(correct_region_results_sorted_by_track_number)
correct_album_incorrect_region_results = fix_results_by_album(incorrect_region_results)
correct_album_prioritised_lyrics_results = sorted(correct_album_correct_region_results, key=lambda x: self.get_lyrics_support(x[‘attributes’]))
return correct_album_prioritised_lyrics_results + correct_album_incorrect_region_results
def get_lyrics(self, track_id, lyrics_resource=None):
if not lyrics_resource: lyrics_resource = self.lyrics_resource
try:
data = self._get(f’songs/{track_id}/{lyrics_resource}', storefront=self.lyrics_storefront, language=self.language)
except self.exception:
return None
return data#[‘data’][0][‘attributes’][‘ttml’]
|
93ef1c5c5475d8b9d0b7a13f437fcda0
|
{
"intermediate": 0.5114019513130188,
"beginner": 0.33517274260520935,
"expert": 0.15342532098293304
}
|
41,054
|
is there any server side npm module that will generate thumbails for various types of documents
Like pdf, doc, excel, csv
Like if we provide the document it should generate the first page of that document as an base64 image
|
cfd473702afa71c5787c9e60b5630a54
|
{
"intermediate": 0.596832275390625,
"beginner": 0.16887149214744568,
"expert": 0.23429620265960693
}
|
41,055
|
const puppeteer = require('puppeteer');
/**
* Converts a specific page of a PDF document to an image using Puppeteer.
* @param {string} pdfPath - The path to the PDF document.
* @param {number} pageNumber - The page number to convert (1-indexed).
* @param {Object} options - Options for the conversion.
* @param {Object} options.thumbnailSize - The size of the thumbnail.
* @param {boolean} options.returnBuffer - Determines whether to return a buffer or a base64-encoded string.
* @returns {Promise<Buffer|string>} A promise resolving to the image buffer or base64-encoded string.
* @throws {Error} Thrown if the PDF does not contain the specified page or if the conversion fails.
*/
async function convertPDFPageToImage(pdfPath, pageNumber, options) {
const browser = await puppeteer.launch();
const page = await browser.newPage();
// Navigate to a URL with the local PDF file
const url = `file://${pdfPath}`;
await page.goto(url, { waitUntil: 'networkidle0' });
// Set the viewport size to control image dimensions
await page.setViewport({
width: options.thumbnailSize.width,
height: options.thumbnailSize.height,
});
// Evaluate JavaScript in the page context to get the base64-encoded image
const base64Image = await page.evaluate(async (pageNumber) => {
const page = document.querySelector(`div[data-page-number="${pageNumber}"]`);
if (!page) {
throw new Error(`PDF does not contain page number: ${pageNumber}`);
}
// Get the page's canvas element and return its base64-encoded image
const canvas = page.querySelector('canvas');
return canvas.toDataURL().split(',')[1];
}, pageNumber);
await browser.close();
// Depending on the options, return the image in the desired format
return options.returnBuffer ? Buffer.from(base64Image, 'base64') : base64Image;
}
/**
* Converts a PDF document to a thumbnail using Puppeteer.
* @param {string} documentPath - The path to the PDF document.
* @param {Object} options - Options for thumbnail generation.
* @returns {Promise<Buffer|string>} A promise resolving to the thumbnail buffer or base64-encoded string.
*/
async function toThumbnail(documentPath, options) {
// Set default options with a default page number (first page)
const finalOptions = { pageNumber: 1, ...options };
// Convert the specified page to an image
return await convertPDFPageToImage(documentPath, finalOptions.pageNumber, finalOptions);
}
module.exports = {
toThumbnail
};
|
bf545b59f8a49cceb088cdee47b0e033
|
{
"intermediate": 0.45702603459358215,
"beginner": 0.21799983084201813,
"expert": 0.3249741196632385
}
|
41,056
|
what is the correct order of these contact info on a resume:
Country, linkedin, github, gmail, phone
|
36e181a27de2cec4d03c4ff2c0675d27
|
{
"intermediate": 0.37215644121170044,
"beginner": 0.35537463426589966,
"expert": 0.2724689245223999
}
|
41,057
|
import acrcloud
import os
import eyed3
import requests
import json
from acrcloud.recognizer import ACRCloudRecognizer
# ACR Cloud setup
config = {
'host': ACR_HOST,
'access_key': ACR_ACCESS_KEY,
'access_secret': ACR_ACCESS_SECRET,
'timeout': 10 # seconds
}
dir(acrcloud)
# Initialize the ACRCloud recognizer
recognizer = ACRCloudRecognizer(config)
# Function to recognize the song from an audio file
def recognize_song(audio_file_path):
buffer = open(audio_file_path, 'rb').read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result) # Parse the JSON string into a dictionary
return result_dict['metadata']['music'][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}")
return None
# Function to set ID3 tags
def set_id3_tags_mp3(audio_file_path, tags):
audio_file = eyed3.load(audio_file_path)
if not audio_file.tag:
audio_file.initTag()
audio_file.tag.artist = tags.get('artists')[0].get('name')
audio_file.tag.album = tags.get('album').get('name')
audio_file.tag.album_artist = tags.get('artists')[0].get('name')
audio_file.tag.title = tags.get('title')
# Set the release year (if available)
release_date = tags.get('release_date')
if release_date and len(release_date) >= 4: # Check if release_date contains at least the year
year_string = release_date[:4]
try:
year = int(year_string)
# Some versions of eyeD3 require a Date object if available
if hasattr(eyed3.id3.tag, 'Date'):
audio_file.tag.recording_date = eyed3.id3.tag.Date(year)
else:
# Otherwise, set it as text_frame
audio_file.tag.setTextFrame("TDRC", year_string)
except ValueError:
print(f"Invalid date format in the tag: {release_date}")
# Add more tags here
audio_file.tag.genre = tags.get('genres')[0].get('name') # Assuming there's at least one genre
audio_file.tag.publisher = "Karthik" # Publisher tag set as 'karthik'
# To save the copyright label:
audio_file.tag.copyright = tags.get('label', '')
# To save the album cover page, you would need to download the image from a source
# and then do something like this:
# with open("path_to_cover_image.jpg", "rb") as album_art:
# audio_file.tag.images.set(3, album_art.read(), "image/jpeg", u"Description")
audio_file.tag.save(version=eyed3.id3.ID3_V2_3)
audio_file.tag.save()
# Replace 'path_to_your_audio_file.mp3' with the actual file path of the unknown song
if __name__ == "__main__":
audio_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3'
song_tags = recognize_song(audio_file_path)
if song_tags:
print(f'Song identified: {song_tags}')
set_id3_tags_mp3(audio_file_path, song_tags)
# Renaming the file after identifying the song and setting tags
artist_name = song_tags.get('artists')[0].get('name')
song_title = song_tags.get('title')
if artist_name and song_title:
new_file_name = f"{artist_name} - {song_title}.mp3"
new_file_path = os.path.join(os.path.dirname(audio_file_path), new_file_name)
os.rename(audio_file_path, new_file_path)
print(f"File has been renamed to: {new_file_name}")
else:
print('Could not identify the song.')
import json
import requests
from acrcloud.recognizer import ACRCloudRecognizer
from musixmatch_api import Musixmatch, CaptchaError, UserTokenError
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error
# ACR Cloud configuration (update with your credentials)
config = {
'host': ACR_HOST,
'access_key': ACR_ACCESS_KEY,
'access_secret': ACR_ACCESS_SECRET,
'timeout': 10 # seconds
}
recognizer = ACRCloudRecognizer(config)
# Initialize Musixmatch API (exception handling to be added as per your implementation)
musixmatch = Musixmatch(Exception)
# Function to recognize a song using ACRCloud
def recognize_song(audio_file_path):
buffer = open(audio_file_path, 'rb').read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result)
return result_dict['metadata']['music'][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}")
return None
def format_time(ts):
'''Converts time in seconds to the format [mm:ss.xx]'''
minutes = int(ts // 60)
seconds = int(ts % 60)
hundredths = int((ts - int(ts)) * 100)
return f'[{minutes:02d}:{seconds:02d}.{hundredths:02d}]'
def process_rich_sync_lyrics(rich_sync_lyrics_json):
'''Converts Musixmatch rich sync data to LRC format'''
lrc_lines = []
try:
# Load the JSON string into a Python object
rich_sync_data = json.loads(rich_sync_lyrics_json)
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {e}")
return None
# Iterate through each line and create formatted LRC lines
for line in rich_sync_data:
ts = format_time(line['ts']) # Start time of the line
lrc_line = f'{ts}{line["x"]}' # Use "x" for the entire line of lyrics
lrc_lines.append(lrc_line)
# Join the formatted lines with line breaks
return '\n'.join(lrc_lines)
# Function to get lyrics from Musixmatch given artist name and song title
def get_lyrics_from_musicxmatch(artist_name, song_title):
try:
user_token = musixmatch.get_user_token()
track_data = musixmatch.get_search_by_track(song_title, artist_name, "")
if track_data:
track_id = track_data['track_id']
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
# Print the JSON response for debugging
print(json.dumps(track_data, indent=2))
print(json.dumps(rich_sync_data, indent=2))
if rich_sync_data and 'richsync_body' in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data['richsync_body']
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
return lrc_lyrics
else:
print("No synced lyrics found.")
return None
else:
print("Track not found in Musixmatch.")
return None
except (CaptchaError, UserTokenError) as e:
print(f"Error while working with Musixmatch: {e}")
return None
if __name__ == "__main__":
audio_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3' # Replace with actual path
lrc_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.lrc' # Output LRC file path
# Recognize the song using ACRCloud
song_tags = recognize_song(audio_file_path)
if song_tags:
artist_name = song_tags['artists'][0]['name']
song_title = song_tags['title']
print(f"Identified Song: {artist_name} - {song_title}")
# Fetch track data using the recognized song's title and artist name
track_data = musixmatch.get_search_by_track(song_title, artist_name)
if track_data:
track_id = track_data['track_id']
# Fetch the synced lyrics using the recognized song's track ID
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
if rich_sync_data and 'richsync_body' in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data['richsync_body']
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
if lrc_lyrics:
# Write the LRC lyrics to a file
with open(lrc_file_path, 'w', encoding='utf-8') as lrc_file:
lrc_file.write(lrc_lyrics)
print(f"Saved LRC file to: {lrc_file_path}")
else:
print("No synced lyrics found.")
else:
print("Track not found in Musixmatch.")
else:
print("Could not identify the song.")
use these both codes: and combine into one
parameters to include to tagging a audio file are:
Title, contributing artists, artists, album, year of released, track index number in album, genre, copyright label, composer's name, published by Karthik.
get these by musixmatch
|
436475a622483a3877075f7cb6fa9088
|
{
"intermediate": 0.35964930057525635,
"beginner": 0.374931663274765,
"expert": 0.26541903614997864
}
|
41,058
|
musixmatch_api.py:
mycode.py:
import acrcloud
import os
import eyed3
import requests
import json
from acrcloud.recognizer import ACRCloudRecognizer
# ACR Cloud setup
config = {
‘host’: ACR_HOST,
‘access_key’: ACR_ACCESS_KEY,
‘access_secret’: ACR_ACCESS_SECRET,
‘timeout’: 10 # seconds
}
dir(acrcloud)
# Initialize the ACRCloud recognizer
recognizer = ACRCloudRecognizer(config)
# Function to recognize the song from an audio file
def recognize_song(audio_file_path):
buffer = open(audio_file_path, ‘rb’).read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result) # Parse the JSON string into a dictionary
return result_dict[‘metadata’][‘music’][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}“)
return None
# Function to set ID3 tags
def set_id3_tags_mp3(audio_file_path, tags):
audio_file = eyed3.load(audio_file_path)
if not audio_file.tag:
audio_file.initTag()
audio_file.tag.artist = tags.get(‘artists’)[0].get(‘name’)
audio_file.tag.album = tags.get(‘album’).get(‘name’)
audio_file.tag.album_artist = tags.get(‘artists’)[0].get(‘name’)
audio_file.tag.title = tags.get(‘title’)
# Set the release year (if available)
release_date = tags.get(‘release_date’)
if release_date and len(release_date) >= 4: # Check if release_date contains at least the year
year_string = release_date[:4]
try:
year = int(year_string)
# Some versions of eyeD3 require a Date object if available
if hasattr(eyed3.id3.tag, ‘Date’):
audio_file.tag.recording_date = eyed3.id3.tag.Date(year)
else:
# Otherwise, set it as text_frame
audio_file.tag.setTextFrame(“TDRC”, year_string)
except ValueError:
print(f"Invalid date format in the tag: {release_date}”)
# Add more tags here
audio_file.tag.genre = tags.get(‘genres’)[0].get(‘name’) # Assuming there’s at least one genre
audio_file.tag.publisher = “Karthik” # Publisher tag set as ‘karthik’
# To save the copyright label:
audio_file.tag.copyright = tags.get(‘label’, ‘’)
# To save the album cover page, you would need to download the image from a source
# and then do something like this:
# with open(“path_to_cover_image.jpg”, “rb”) as album_art:
# audio_file.tag.images.set(3, album_art.read(), “image/jpeg”, u"Description")
audio_file.tag.save(version=eyed3.id3.ID3_V2_3)
audio_file.tag.save()
# Replace ‘path_to_your_audio_file.mp3’ with the actual file path of the unknown song
if name == “main”:
audio_file_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3’
song_tags = recognize_song(audio_file_path)
if song_tags:
print(f’Song identified: {song_tags}‘)
set_id3_tags_mp3(audio_file_path, song_tags)
# Renaming the file after identifying the song and setting tags
artist_name = song_tags.get(‘artists’)[0].get(‘name’)
song_title = song_tags.get(‘title’)
if artist_name and song_title:
new_file_name = f"{artist_name} - {song_title}.mp3"
new_file_path = os.path.join(os.path.dirname(audio_file_path), new_file_name)
os.rename(audio_file_path, new_file_path)
print(f"File has been renamed to: {new_file_name}“)
else:
print(‘Could not identify the song.’)
import json
import requests
from acrcloud.recognizer import ACRCloudRecognizer
from musixmatch_api import Musixmatch, CaptchaError, UserTokenError
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error
# ACR Cloud configuration (update with your credentials)
config = {
‘host’: ACR_HOST,
‘access_key’: ACR_ACCESS_KEY,
‘access_secret’: ACR_ACCESS_SECRET,
‘timeout’: 10 # seconds
}
recognizer = ACRCloudRecognizer(config)
# Initialize Musixmatch API (exception handling to be added as per your implementation)
musixmatch = Musixmatch(Exception)
# Function to recognize a song using ACRCloud
def recognize_song(audio_file_path):
buffer = open(audio_file_path, ‘rb’).read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result)
return result_dict[‘metadata’][‘music’][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}”)
return None
def format_time(ts):
‘’‘Converts time in seconds to the format [mm:ss.xx]’’‘
minutes = int(ts // 60)
seconds = int(ts % 60)
hundredths = int((ts - int(ts)) * 100)
return f’[{minutes:02d}:{seconds:02d}.{hundredths:02d}]‘
def process_rich_sync_lyrics(rich_sync_lyrics_json):
‘’‘Converts Musixmatch rich sync data to LRC format’’‘
lrc_lines = []
try:
# Load the JSON string into a Python object
rich_sync_data = json.loads(rich_sync_lyrics_json)
except json.JSONDecodeError as e:
print(f"Error decoding JSON: {e}")
return None
# Iterate through each line and create formatted LRC lines
for line in rich_sync_data:
ts = format_time(line[‘ts’]) # Start time of the line
lrc_line = f’{ts}{line[“x”]}’ # Use “x” for the entire line of lyrics
lrc_lines.append(lrc_line)
# Join the formatted lines with line breaks
return ‘\n’.join(lrc_lines)
# Function to get lyrics from Musixmatch given artist name and song title
def get_lyrics_from_musicxmatch(artist_name, song_title):
try:
user_token = musixmatch.get_user_token()
track_data = musixmatch.get_search_by_track(song_title, artist_name, “”)
if track_data:
track_id = track_data[‘track_id’]
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
# Print the JSON response for debugging
print(json.dumps(track_data, indent=2))
print(json.dumps(rich_sync_data, indent=2))
if rich_sync_data and ‘richsync_body’ in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data[‘richsync_body’]
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
return lrc_lyrics
else:
print(“No synced lyrics found.”)
return None
else:
print(“Track not found in Musixmatch.”)
return None
except (CaptchaError, UserTokenError) as e:
print(f"Error while working with Musixmatch: {e}“)
return None
if name == “main”:
audio_file_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3’ # Replace with actual path
lrc_file_path = ‘C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.lrc’ # Output LRC file path
# Recognize the song using ACRCloud
song_tags = recognize_song(audio_file_path)
if song_tags:
artist_name = song_tags[‘artists’][0][‘name’]
song_title = song_tags[‘title’]
print(f"Identified Song: {artist_name} - {song_title}”)
# Fetch track data using the recognized song’s title and artist name
track_data = musixmatch.get_search_by_track(song_title, artist_name)
if track_data:
track_id = track_data[‘track_id’]
# Fetch the synced lyrics using the recognized song’s track ID
rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
if rich_sync_data and ‘richsync_body’ in rich_sync_data:
rich_sync_lyrics_json = rich_sync_data[‘richsync_body’]
lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
if lrc_lyrics:
# Write the LRC lyrics to a file
with open(lrc_file_path, ‘w’, encoding=‘utf-8’) as lrc_file:
lrc_file.write(lrc_lyrics)
print(f"Saved LRC file to: {lrc_file_path}")
else:
print(“No synced lyrics found.”)
else:
print(“Track not found in Musixmatch.”)
else:
print(“Could not identify the song.”)
use these both codes: and combine into one
parameters to include to tagging a audio file are:
Title, contributing artists, artists, album, year of released, track index number in album, genre, copyright label, composer’s name, published by Karthik.
get these by musixmatch
|
8adbd347ff1444136081d5decd09abff
|
{
"intermediate": 0.39927563071250916,
"beginner": 0.4603900611400604,
"expert": 0.14033426344394684
}
|
41,059
|
musixmatch_api.py:
import base64
import hmac
from datetime import datetime
from os import urandom
from urllib import parse
from uuid import uuid4
from utils.utils import create_requests_session
class CaptchaError(Exception):
def __init__(self, message):
super(CaptchaError, self).__init__(message)
class UserTokenError(Exception):
def __init__(self, message):
super(UserTokenError, self).__init__(message)
class Musixmatch:
def __init__(self, exception):
self.API_URL = 'https://apic-desktop.musixmatch.com/ws/1.1/'
self.s = create_requests_session()
self.exception = exception
self.headers = {
'Connection': 'Keep-Alive',
'User-Agent': 'User-Agent: Mozilla/5.0 (X11; Linux x86_64) AppleWebKit/537.36 (KHTML, like Gecko) '
'Musixmatch/0.19.4 Chrome/58.0.3029.110 Electron/1.7.6 Safari/537.36 '
}
self.user_token = None
def sign_request(self, method, params, timestamp):
to_hash = self.API_URL + method + '?' + parse.urlencode(params)
# Thanks to https://github.com/aaronlpv/live-lyrics/blob/master/musixmatch.c for the desktop app hmac key
key = ("IEJ5E8XFaH" "QvIQNfs7IC").encode()
signature = hmac.digest(key, (to_hash + timestamp).encode(), digest='SHA1')
return base64.urlsafe_b64encode(signature).decode()
def get_user_token_old(self):
currenttime = datetime.now()
timestamp = currenttime.strftime('%Y-%m-%dT%H:%M:%SZ')
signature_timestamp = currenttime.strftime('%Y%m%d')
method = 'token.get'
params = {
'format': 'json',
'guid': str(uuid4()),
'timestamp': timestamp,
'build_number': '2017091202',
'lang': 'en-GB',
'app_id': 'web-desktop-app-v1.0'
}
params['signature'] = self.sign_request(method, params, signature_timestamp)
params['signature_protocol'] = 'sha1'
r = self.s.get(self.API_URL + method, params=params, headers=self.headers, cookies={'AWSELB': 'unknown'})
if r.status_code != 200:
raise Exception(r.text)
self.user_token = r.json()['message']['body']['user_token']
if self.user_token == 'UpgradeOnlyUpgradeOnlyUpgradeOnlyUpgradeOnly':
raise Exception('Musixmatch: getting token failed')
return self.user_token
def get_user_token(self):
r = self.s.get(f'{self.API_URL}token.get', headers=self.headers, params={
'user_language': 'en', 'app_id': 'web-desktop-app-v1.0'
}, cookies={
'AWSELB': '0', 'AWSELBCORS': '0'
})
r = r.json()
if r['message']['header']['status_code'] == 401 and r['message']['header']['hint'] == 'captcha':
raise CaptchaError('Captcha required')
elif r['message']['header']['status_code'] != 200:
raise self.exception(f"Error: {r['message']['header']['hint']}")
self.user_token = r['message']['body']['user_token']
if self.user_token == 'UpgradeOnlyUpgradeOnlyUpgradeOnlyUpgradeOnly':
raise UserTokenError('Getting user token failed')
return self.user_token
def _get(self, url: str, query: dict):
params = {
'usertoken': self.user_token,
'app_id': 'web-desktop-app-v1.0',
}
params.update(query)
r = self.s.get(f'{self.API_URL}{url}', params=params, headers=self.headers, cookies={
'AWSELB': '0', 'AWSELBCORS': '0'
})
print("Response Status Code:", r.status_code) # Debugging line
if r.status_code not in [200, 201, 202]:
print("Response Text (Error):", r.text) # Debugging line
raise self.exception(r.text)
print("API Response:", json.dumps(response_content, indent=2)) # Debugging line
r = r.json()
if r['message']['header']['status_code'] == 401 and r['message']['header']['hint'] == 'captcha':
# throw a captcha error
raise CaptchaError('Captcha required')
elif r['message']['header']['status_code'] != 200:
return None
return r['message']['body']
def get_search_by_track(self, track_name: str, artist_name: str, album_name: str):
# needed for richsync?
r = self._get('matcher.track.get', {
'q_track': track_name,
'q_artist': artist_name,
'q_album': album_name,
})
print("Track Data:", r)
return r['track'] if r else None
def get_track_by_isrc(self, isrc: str):
r = self._get('track.get', {'track_isrc': isrc})
return r['track'] if r else None
def get_lyrics_by_id(self, track_id: str):
r = self._get('track.lyrics.get', {'track_id': track_id})
return r['lyrics'] if r else None
def get_subtitle_by_id(self, common_track_id: str):
r = self._get('track.subtitle.get', {'commontrack_id': common_track_id})
return r['subtitle'] if r else None
def get_rich_sync_by_id(self, track_id: str):
# requires track_id and not common_track_id
r = self._get('track.richsync.get', {'track_id': track_id})
return r['richsync'] if r else None
# Inside musixmatch_api.py, in the Musixmatch class
def get_album_cover_url(self, album_id):
album_info = self._get(f'album.get', {'album_id': album_id})
if album_info is None:
raise Exception(f"Could not get album info for album_id {album_id}")
return album_info['album']['album_coverart_100x100']
def get_lyrics_by_metadata(self, track_name: str, artist_name: str, album_name: str):
return self._get('macro.subtitles.get', {
'q_artist': artist_name,
'q_track': track_name,
'q_album': album_name,
'format': 'json',
'namespace': 'lyrics_richsynched',
'optional_calls': 'track.richsync'
})['macro_calls']
mycode.py:
import acrcloud
import os
import eyed3
import requests
import json
from acrcloud.recognizer import ACRCloudRecognizer
# ACR Cloud setup
config = {
‘host’: ACR_HOST,
‘access_key’: ACR_ACCESS_KEY,
‘access_secret’: ACR_ACCESS_SECRET,
‘timeout’: 10 # seconds
}
dir(acrcloud)
# Initialize the ACRCloud recognizer
recognizer = ACRCloudRecognizer(config)
# Function to recognize the song from an audio file
def recognize_song(audio_file_path):
buffer = open(audio_file_path, ‘rb’).read()
result = recognizer.recognize_by_filebuffer(buffer, 0)
try:
result_dict = json.loads(result) # Parse the JSON string into a dictionary
return result_dict[‘metadata’][‘music’][0]
except (KeyError, IndexError, json.JSONDecodeError) as e:
print(f"Error while parsing result: {e}“)
return None
# Function to set ID3 tags
def set_id3_tags_mp3(audio_file_path, tags):
    """Write ACRCloud metadata into the MP3 file's ID3v2.3 tag.

    tags: the ACRCloud 'music' metadata dict; this function reads its
    'artists', 'album', 'title', 'release_date', 'genres' and 'label' keys.
    (Also fixes curly quotes that made the original a syntax error.)
    """
    audio_file = eyed3.load(audio_file_path)
    if not audio_file.tag:
        audio_file.initTag()

    audio_file.tag.artist = tags.get('artists')[0].get('name')
    audio_file.tag.album = tags.get('album').get('name')
    audio_file.tag.album_artist = tags.get('artists')[0].get('name')
    audio_file.tag.title = tags.get('title')

    # Set the release year (if available); only the leading 4 chars are used.
    release_date = tags.get('release_date')
    if release_date and len(release_date) >= 4:  # at least the year is present
        year_string = release_date[:4]
        try:
            year = int(year_string)
            # Some eyeD3 versions expose a Date object -- TODO confirm the
            # attribute really lives at eyed3.id3.tag.Date in this version.
            if hasattr(eyed3.id3.tag, 'Date'):
                audio_file.tag.recording_date = eyed3.id3.tag.Date(year)
            else:
                # Otherwise, set it as a raw TDRC text frame.
                audio_file.tag.setTextFrame("TDRC", year_string)
        except ValueError:
            print(f"Invalid date format in the tag: {year_string}")

    # Genre may be missing or empty; the original assumed at least one entry
    # and crashed with TypeError/IndexError on tags without a 'genres' list.
    genres = tags.get('genres')
    if genres:
        audio_file.tag.genre = genres[0].get('name')

    audio_file.tag.publisher = "Karthik"  # Publisher tag set as 'Karthik'
    # Save the copyright label:
    audio_file.tag.copyright = tags.get('label', '')

    # To save the album cover, download the image and then do something like:
    # with open("path_to_cover_image.jpg", "rb") as album_art:
    #     audio_file.tag.images.set(3, album_art.read(), "image/jpeg", u"Description")

    # Save exactly once as ID3 v2.3. The original called save() a second time
    # with the default version, redundantly rewriting the tag just written.
    audio_file.tag.save(version=eyed3.id3.ID3_V2_3)
# Replace the path below with the actual file path of the unknown song.
# Fixed: the guard was written `if name == "main":` -- the dunders were lost,
# so the script body never ran; it must be `__name__` / `"__main__"`.
if __name__ == "__main__":
    audio_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3'
    song_tags = recognize_song(audio_file_path)
    if song_tags:
        print(f'Song identified: {song_tags}')
        set_id3_tags_mp3(audio_file_path, song_tags)

        # Rename the file after identifying the song and setting tags.
        artist_name = song_tags.get('artists')[0].get('name')
        song_title = song_tags.get('title')
        if artist_name and song_title:
            new_file_name = f"{artist_name} - {song_title}.mp3"
            new_file_path = os.path.join(os.path.dirname(audio_file_path), new_file_name)
            os.rename(audio_file_path, new_file_path)
            print(f"File has been renamed to: {new_file_name}")
    else:
        print('Could not identify the song.')
import json
import requests
from acrcloud.recognizer import ACRCloudRecognizer
from musixmatch_api import Musixmatch, CaptchaError, UserTokenError
from mutagen.mp3 import MP3
from mutagen.id3 import ID3, APIC, error
# ACR Cloud configuration (update with your credentials).
# Fixed: curly "smart" quotes made this dict a syntax error.
# NOTE(review): ACR_HOST / ACR_ACCESS_KEY / ACR_ACCESS_SECRET are not defined
# in this file -- confirm they are supplied before running.
config = {
    'host': ACR_HOST,
    'access_key': ACR_ACCESS_KEY,
    'access_secret': ACR_ACCESS_SECRET,
    'timeout': 10  # seconds
}

recognizer = ACRCloudRecognizer(config)

# Initialize Musixmatch API (exception handling to be added as per your
# implementation).
# NOTE(review): passing the Exception class as the constructor argument looks
# unusual -- verify against the Musixmatch class's __init__ signature.
musixmatch = Musixmatch(Exception)
# Function to recognize a song using ACRCloud
def recognize_song(audio_file_path):
    """Identify a song with ACRCloud and return its first 'music' metadata entry.

    Returns None when the recognizer's response cannot be parsed or contains
    no match. (Also fixes curly quotes that made the original a syntax error.)
    """
    # 'with' guarantees the file handle is closed; the original leaked it.
    with open(audio_file_path, 'rb') as audio_file:
        buffer = audio_file.read()
    result = recognizer.recognize_by_filebuffer(buffer, 0)
    try:
        result_dict = json.loads(result)
        return result_dict['metadata']['music'][0]
    except (KeyError, IndexError, json.JSONDecodeError) as e:
        print(f"Error while parsing result: {e}")
        return None
def format_time(ts):
    """Convert a time in seconds to the LRC timestamp format [mm:ss.xx]."""
    minutes = int(ts // 60)
    seconds = int(ts % 60)
    hundredths = int((ts - int(ts)) * 100)
    return f'[{minutes:02d}:{seconds:02d}.{hundredths:02d}]'

def process_rich_sync_lyrics(rich_sync_lyrics_json):
    """Convert Musixmatch rich-sync data (a JSON string) to LRC format.

    Returns the LRC text (one '[mm:ss.xx]line' per line), or None when the
    input is not valid JSON. (Also fixes the curly quotes that made the
    original docstrings and literals a syntax error.)
    """
    lrc_lines = []
    try:
        # Load the JSON string into a Python object
        rich_sync_data = json.loads(rich_sync_lyrics_json)
    except json.JSONDecodeError as e:
        print(f"Error decoding JSON: {e}")
        return None

    # Each entry carries 'ts' (line start time, seconds) and 'x' (the entire
    # line of lyrics).
    for line in rich_sync_data:
        ts = format_time(line['ts'])
        lrc_lines.append(f'{ts}{line["x"]}')

    # Join the formatted lines with line breaks
    return '\n'.join(lrc_lines)
# Function to get lyrics from Musixmatch given artist name and song title
def get_lyrics_from_musicxmatch(artist_name, song_title):
    """Fetch rich-sync lyrics for a track and return them as LRC text.

    Returns None when the track is not found, no synced lyrics exist, or the
    Musixmatch client raises CaptchaError/UserTokenError. (Also fixes curly
    quotes that made the original a syntax error.)
    """
    try:
        # Return value is unused; presumably this call primes/caches the user
        # token inside the client -- TODO confirm against get_user_token.
        user_token = musixmatch.get_user_token()
        track_data = musixmatch.get_search_by_track(song_title, artist_name, "")
        if not track_data:
            print("Track not found in Musixmatch.")
            return None

        track_id = track_data['track_id']
        rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)

        # Print the raw API responses for debugging.
        print(json.dumps(track_data, indent=2))
        print(json.dumps(rich_sync_data, indent=2))

        if rich_sync_data and 'richsync_body' in rich_sync_data:
            rich_sync_lyrics_json = rich_sync_data['richsync_body']
            return process_rich_sync_lyrics(rich_sync_lyrics_json)

        print("No synced lyrics found.")
        return None
    except (CaptchaError, UserTokenError) as e:
        print(f"Error while working with Musixmatch: {e}")
        return None
# Fixed: the guard was written `if name == "main":` -- the dunders were lost,
# so the script body never ran; it must be `__name__` / `"__main__"`.
if __name__ == "__main__":
    audio_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.mp3'  # Replace with actual path
    lrc_file_path = 'C:/Users/ILEG-i5-11/Downloads/Music/Unknown_file.lrc'  # Output LRC file path

    # Recognize the song using ACRCloud
    song_tags = recognize_song(audio_file_path)
    if song_tags:
        artist_name = song_tags['artists'][0]['name']
        song_title = song_tags['title']
        print(f"Identified Song: {artist_name} - {song_title}")

        # Fetch track data using the recognized song's title and artist name.
        # NOTE(review): called with two arguments here but three elsewhere
        # (get_lyrics_from_musicxmatch passes an empty album string) --
        # confirm get_search_by_track's signature accepts both forms.
        track_data = musixmatch.get_search_by_track(song_title, artist_name)
        if track_data:
            track_id = track_data['track_id']
            # Fetch the synced lyrics using the recognized song's track ID
            rich_sync_data = musixmatch.get_rich_sync_by_id(track_id)
            if rich_sync_data and 'richsync_body' in rich_sync_data:
                rich_sync_lyrics_json = rich_sync_data['richsync_body']
                lrc_lyrics = process_rich_sync_lyrics(rich_sync_lyrics_json)
                if lrc_lyrics:
                    # Write the LRC lyrics to a file
                    with open(lrc_file_path, 'w', encoding='utf-8') as lrc_file:
                        lrc_file.write(lrc_lyrics)
                    print(f"Saved LRC file to: {lrc_file_path}")
            else:
                print("No synced lyrics found.")
        else:
            print("Track not found in Musixmatch.")
    else:
        print("Could not identify the song.")
Combine both of these code files into one.
The tags to include when tagging an audio file are:
title, contributing artists, artists, album, year of release, track number within the album, genre, copyright label, composer's name, and publisher (set to "Karthik").
Fetch these values from Musixmatch.
|
4048191b3e806c2b2964b46772c152cd
|
{
"intermediate": 0.34357646107673645,
"beginner": 0.45491790771484375,
"expert": 0.20150567591190338
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.