row_id
int64
0
48.4k
init_message
stringlengths
1
342k
conversation_hash
stringlengths
32
32
scores
dict
37,646
import cv2 import tkinter as tk from tkinter import filedialog from PIL import Image, ImageTk import numpy as np # Set a fixed display size for the images display_size = (700, 700) # Function to load and process image def process_image(filter_type): global image, panelA, path if path: # Load the image and convert it to grayscale image = cv2.imread(path) # Resize the image to fit the display size image_resized = cv2.resize(image, display_size, interpolation=cv2.INTER_AREA) if image_resized.ndim == 3 and filter_type != 'original': # Convert to grayscale for filtering if needed image_resized = cv2.cvtColor(image_resized, cv2.COLOR_BGR2GRAY) # Kernel (custom filtering) kernel = np.array([[0, -1, 0], [-1, 5, -1], [0, -1, 0]]) sharpened_img = cv2.filter2D(resized_img, -1, kernel) # Sobel operator sobelx = cv2.Sobel(resized_img, cv2.CV_64F, 1, 0, ksize=3) sobely = cv2.Sobel(resized_img, cv2.CV_64F, 0, 1, ksize=3) # Laplacian laplacian = cv2.Laplacian(resized_img, cv2.CV_64F) # Canny edge detection canny_edges = cv2.Canny(resized_img, threshold1=100, threshold2=200) # Motion Blur size = 15 motion_blur_kernel = np.zeros((size, size)) motion_blur_kernel[int((size-1)/2), :] = np.ones(size) motion_blur_kernel = motion_blur_kernel / size motion_blurred = cv2.filter2D(resized_img, -1, motion_blur_kernel) # Embossing embossing_kernel = np.array([[-2, -1, 0], [-1, 1, 1], [0, 1, 2]]) embossed_img = cv2.filter2D(resized_img, -1, embossing_kernel) if processed_image.ndim == 2: # Convert to BGR for color consistency processed_image = cv2.cvtColor(processed_image, cv2.COLOR_GRAY2BGR) # Convert the image to PIL format and then to ImageTk format processed_image = Image.fromarray(processed_image) processed_image = ImageTk.PhotoImage(processed_image) # If the panel A is None, initialize it if panelA is None: panelA = tk.Label(image=processed_image) panelA.image = processed_image panelA.pack(side="top", padx=10, pady=10) else: # Update the panel panelA.configure(image=processed_image) 
panelA.image = processed_image # Function to open file dialog and load an image def open_image(): global path path = filedialog.askopenfilename() if path: # If a file was selected process_image('original') # Initialize the window root = tk.Tk() root.title("Edge Detection GUI") # Set the initial width of the window # Set the window to full screen root.attributes('-fullscreen', True) # Initialize panelA (to display the image) panelA = None # Initialize image path path = None def exit_application(): root.destroy() # Create exit button btn_exit = tk.Button(root, text="Exit", command=exit_application) btn_exit.pack(side="bottom", fill="both", expand="yes", padx="10", pady="10") # Create buttons for different edge detection methods btn_sobel = tk.Button(root, text="Sobel", command=lambda: process_image('sobel')) btn_sobel.pack(side='bottom', fill="both", expand="yes", padx="10", pady="10") btn_laplacian = tk.Button(root, text="Laplacian", command=lambda: process_image('laplacian')) btn_laplacian.pack(side='bottom', fill="both", expand="yes", padx="10", pady="10") btn_canny = tk.Button(root, text="Canny", command=lambda: process_image('canny')) btn_canny.pack(side='bottom', fill="both", expand="yes", padx="10", pady="10") btn_blur = tk.Button(root, text="Blur", command=lambda: process_image('blur')) btn_blur.pack(side='bottom', fill="both", expand="yes", padx="10", pady="10") btn_embossing = tk.Button(root, text="Embossing", command=lambda: process_image('embossing')) btn_embossing.pack(side='bottom', fill="both", expand="yes", padx="10", pady="10") # Function to exit the application btn_open = tk.Button(root, text="Open Image", command=open_image) btn_open.pack(side='bottom', fill="both", expand="yes", padx="10", pady="10") # Start the GUI loop root.mainloop()
7b9666ada060fad386487c88186959a6
{ "intermediate": 0.3939693868160248, "beginner": 0.3383312523365021, "expert": 0.26769939064979553 }
37,647
Please assume the role of a unity developer specialising in unity 2020.2.6f1 and visual studio 2017.
e524028940b575d1339d38a2f82b1d05
{ "intermediate": 0.360115110874176, "beginner": 0.2641240656375885, "expert": 0.3757607936859131 }
37,648
Please assume the role of a unity developer specialising in unity 2020.2.6f1 and visual studio 2017. I wish to share some scripts with you. The first is a hoverboard controller a hoverboard is like a skateboard but it hovers. Script 1: using UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header("Hover Settings")] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header("Movement Settings")] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header("UI & Scoring")] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header("Boost Settings")] public float boostMultiplier = 2.0f; [Header("Rotation Settings")] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header("Timer Settings")] public float timer = 5.0f; [Header("Miscellaneous")] public bool moving; public GameObject board; [Header("Input Settings")] public KeyCode moveForwardKey = KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header("Customizable Keys")] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float currentTurn; Rigidbody rb; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool 
isSteeringLeft; private bool isSteeringRight; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); trickManager = GetComponent<TrickManager>(); grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } void Start() { layerMask = 1 << LayerMask.NameToLayer("Characters"); layerMask = ~layerMask; if (scoreText == null) { GameObject canvas = GameObject.Find("Canvas"); if (canvas != null) { scoreText = Instantiate(Resources.Load("ScoreText", typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); } else { Debug.LogError("Canvas not found. 
Make sure there is a Canvas GameObject in the scene."); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); } void Update() { GetSteeringInput(); currentThrust = 0.0f; // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 scoreText.text = currentScore + " Points"; if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } Vector3 currentRotation = transform.rotation.eulerAngles; currentRotation.x = Mathf.Clamp(currentRotation.x, minRotation, maxRotation); currentRotation.z = Mathf.Clamp(currentRotation.z, minRotation, maxRotation); transform.rotation = Quaternion.Euler(currentRotation); float aclAxis = Input.GetAxis("Vertical"); if (aclAxis > 0.1f) currentThrust = aclAxis * forwardAcceleration; else if (aclAxis < -0.1f) currentThrust = aclAxis * backwardAcceleration; // Remove the following block of code related to currentTurn // if (Mathf.Abs(currentTurn) > 0.1f) // { // if (isSteeringLeft) // currentTurn = -turnStrength; // else if (isSteeringRight) // currentTurn = turnStrength; // } // currentTurn = Mathf.Clamp(currentTurn, 
-1.0f, 1.0f); // Remove this line // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); ApplyGrinding(); } void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } } Please wait before 
responding I have more scripts I wish to share with you.
72325cc1065785696020c9f83c47b781
{ "intermediate": 0.29365620017051697, "beginner": 0.456289142370224, "expert": 0.2500546872615814 }
37,649
nanopore sequencing could be done paired-end?
1dad7c7bb0643cc3411c32759ee3b607
{ "intermediate": 0.2258296012878418, "beginner": 0.12491030991077423, "expert": 0.6492601037025452 }
37,650
# First we'll set the sub-diagonal elements to zero, i.e. A[1,0]. # Next we want the diagonal element to be equal to one. # We'll divide the row by the value of A[1, 1]. # Again, we need to test if this is zero. # If so, we'll add a lower row and repeat setting the sub-diagonal elements to zero. # There is no need to edit this function. def fixRowOne(A) : A[1] = A[1] - A[1,0] * A[0] if A[1,1] == 0 : A[1] = A[1] + A[2] A[1] = A[1] - A[1,0] * A[0] if A[1,1] == 0 : A[1] = A[1] + A[3] A[1] = A[1] - A[1,0] * A[0] if A[1,1] == 0 : raise MatrixIsSingular() A[1] = A[1] / A[1,1] return A # This is the first function that you should complete. # Follow the instructions inside the function at each comment. def fixRowTwo(A) : # Insert code below to set the sub-diagonal elements of row two to zero (there are two of them). # Next we'll test that the diagonal element is not zero. # Insert code below that adds a lower row to row 2. # Now repeat your code which sets the sub-diagonal elements to zero. if A[2,2] == 0 : raise MatrixIsSingular() # Finally set the diagonal element to one by dividing the whole row by that element. return A
a4bc86501355b2042e4d7df7e289d354
{ "intermediate": 0.36685100197792053, "beginner": 0.26397404074668884, "expert": 0.369174987077713 }
37,651
update this code public String getConfigValueFromKey(String configKey) { Query queryGeneralQuery = new Query(); queryGeneralQuery.addCriteria(Criteria.where("key").is("TRANSIT_DAYS_" + configKey)); Config config = mongoTemplate.findOne(queryGeneralQuery, Config.class); return config == null ? "7" : config.getValue(); }.......for mogo document fetching....._id 65a132b6364c0e19c530c3c8 key "IGNORE_PRIMARY_REFUND_REJECTION_REASON" value "RPC000006, RPC000007, RPC000012"
f79f19da4d55a926e01124c14ce09e32
{ "intermediate": 0.4057024121284485, "beginner": 0.2911008298397064, "expert": 0.3031967282295227 }
37,652
Hello!
dd5b52919815e57ec0118749cbc10f7a
{ "intermediate": 0.3194829821586609, "beginner": 0.26423266530036926, "expert": 0.41628435254096985 }
37,653
public String getConfigValueFromKey(String configKey) { Query queryGeneralQuery = new Query(); queryGeneralQuery.addCriteria(Criteria.where("key").is(configKey)); Config config = mongoTemplate.findOne(queryGeneralQuery, Config.class); return } update above code for mongo document fetching and replace hardcoded list IGNORE_PRIMARY_REFUND_REJECTION_REASON…_id 65a132b6364c0e19c530c3c8 key “IGNORE_PRIMARY_REFUND_REJECTION_REASON” value “RPC000006, RPC000007, RPC000012”
0d105bf7c02064567ce91e15420d632d
{ "intermediate": 0.4342176020145416, "beginner": 0.31775856018066406, "expert": 0.2480238527059555 }
37,654
Please assume the role of a unity developer specialising in unity 2020.2.6f1 and visual studio 2017. I wish to share some scripts with you. The first is a hoverboard controller a hoverboard is like a skateboard but it hovers. Script 1: using UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header(“Hover Settings”)] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header(“Movement Settings”)] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header(“UI & Scoring”)] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header(“Boost Settings”)] public float boostMultiplier = 2.0f; [Header(“Rotation Settings”)] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header(“Timer Settings”)] public float timer = 5.0f; [Header(“Miscellaneous”)] public bool moving; public GameObject board; [Header(“Input Settings”)] public KeyCode moveForwardKey = KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header(“Customizable Keys”)] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float currentTurn; Rigidbody rb; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool 
isSteeringLeft; private bool isSteeringRight; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); trickManager = GetComponent<TrickManager>(); grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } void Start() { layerMask = 1 << LayerMask.NameToLayer(“Characters”); layerMask = ~layerMask; if (scoreText == null) { GameObject canvas = GameObject.Find(“Canvas”); if (canvas != null) { scoreText = Instantiate(Resources.Load(“ScoreText”, typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); } else { Debug.LogError(“Canvas not found. 
Make sure there is a Canvas GameObject in the scene.”); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); } void Update() { GetSteeringInput(); currentThrust = 0.0f; // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 scoreText.text = currentScore + " Points"; if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } Vector3 currentRotation = transform.rotation.eulerAngles; currentRotation.x = Mathf.Clamp(currentRotation.x, minRotation, maxRotation); currentRotation.z = Mathf.Clamp(currentRotation.z, minRotation, maxRotation); transform.rotation = Quaternion.Euler(currentRotation); float aclAxis = Input.GetAxis(“Vertical”); if (aclAxis > 0.1f) currentThrust = aclAxis * forwardAcceleration; else if (aclAxis < -0.1f) currentThrust = aclAxis * backwardAcceleration; // Remove the following block of code related to currentTurn // if (Mathf.Abs(currentTurn) > 0.1f) // { // if (isSteeringLeft) // currentTurn = -turnStrength; // else if (isSteeringRight) // currentTurn = turnStrength; // } // currentTurn = Mathf.Clamp(currentTurn, 
-1.0f, 1.0f); // Remove this line // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); ApplyGrinding(); } void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } } Please wait before 
responding I have more scripts I wish to share with you.
2d5ff515b4863a1b4d0300083f05fff0
{ "intermediate": 0.25194916129112244, "beginner": 0.5155384540557861, "expert": 0.23251241445541382 }
37,655
In my REACT app, I have a Context that I have set up inside this component like so:
4d72724c805f7605d228eedd1217ebaf
{ "intermediate": 0.41925105452537537, "beginner": 0.29426446557044983, "expert": 0.2864845097064972 }
37,656
Hi, In sapui5 tree component, how to realize when I select parent node and all the child node will also be selected and expanded?
4bf12730072325bca5b4f7bb316fc0b0
{ "intermediate": 0.47850197553634644, "beginner": 0.1296815574169159, "expert": 0.3918163776397705 }
37,657
I am facing issue like when selecting the parent node all the child node which are targeting to respective parent node has to be selected automatically. The control is in sapui5, sap.m.tree and the multiselection is on. How it can possible ?
2cbdd75df285bfb800afb49d0147c7bd
{ "intermediate": 0.41933077573776245, "beginner": 0.15913432836532593, "expert": 0.421534925699234 }
37,658
Give simple examples of using this STL structures in C++. Give function list for any: 1. array 2. vector 3. list 4. queue 5. map 6. unordered_map 7. set 8. deque 9. priority_queue 10. multiset 11. STL sort, find example 12. STL iterator example
1e7d8227e3cc790693400fcd37b4bd1d
{ "intermediate": 0.5062105655670166, "beginner": 0.22953106462955475, "expert": 0.26425832509994507 }
37,659
can google map find nearest mrt or lrt with distance using api
1fb53cc3cb171ba3ca6aa2bc7c26de17
{ "intermediate": 0.5023931264877319, "beginner": 0.17287598550319672, "expert": 0.32473084330558777 }
37,660
Привет! У меня есть скрипт selenium: import time from selenium import webdriver from selenium.webdriver.common.by import By from selenium.webdriver.support.ui import WebDriverWait from selenium.webdriver.support import expected_conditions as EC # Настройте путь к вашему драйверу (если он не в PATH) # browser = webdriver.Chrome(executable_path=r'/path/to/chromedriver') # или просто browser = webdriver.Chrome() # Откройте сайт browser.get('https://stresse.ru/hub') # После входа в систему, дайте пользователю время для ввода данных print("Пожалуйста, введите нужные данные на сайте, а затем нажмите Enter в этом окне консоли.") input("Нажмите Enter, когда будете готовы продолжить…") try: while True: # Нажать на кнопку "do kitties" do_kitties_button = WebDriverWait(browser, 10).until( EC.element_to_be_clickable((By.ID, "ff6a63")) ) do_kitties_button.click() # Подтвердите нажатием кнопки "OK" (ждем ее появления) ok_button = WebDriverWait(browser, 10).until( EC.element_to_be_clickable((By.CSS_SELECTOR, "button.swal2-confirm.btn.btn-link")) ) ok_button.click() # Подождите 10 минут до следующего нажатия time.sleep(10 * 60) except KeyboardInterrupt: # Пользователь может остановить скрипт вручную print("Скрипт остановлен пользователем.") browser.quit() except Exception as e: # Выход в случае любых других ошибок print("Произошла ошибка:", e) browser.quit() После нажатия Enter он выдает: Произошла ошибка: Message: Stacktrace: GetHandleVerifier [0x00007FF6A4B02142+3514994] (No symbol) [0x00007FF6A4720CE2] (No symbol) [0x00007FF6A45C76AA] (No symbol) [0x00007FF6A4611860] (No symbol) [0x00007FF6A461197C] (No symbol) [0x00007FF6A4654EE7] (No symbol) [0x00007FF6A463602F] (No symbol) [0x00007FF6A46528F6] (No symbol) [0x00007FF6A4635D93] (No symbol) [0x00007FF6A4604BDC] (No symbol) [0x00007FF6A4605C64] GetHandleVerifier [0x00007FF6A4B2E16B+3695259] GetHandleVerifier [0x00007FF6A4B86737+4057191] GetHandleVerifier [0x00007FF6A4B7E4E3+4023827] GetHandleVerifier [0x00007FF6A48504F9+689705] 
(No symbol) [0x00007FF6A472C048] (No symbol) [0x00007FF6A4728044] (No symbol) [0x00007FF6A47281C9] (No symbol) [0x00007FF6A47188C4] BaseThreadInitThunk [0x00007FFE3D967344+20] RtlUserThreadStart [0x00007FFE3E9C26B1+33] Process finished with exit code 0
a204fe314afe4191a40388f8d69c4d9a
{ "intermediate": 0.2570298910140991, "beginner": 0.5022079944610596, "expert": 0.24076206982135773 }
37,661
Please be an expert in SAPUI5 development. And answer my question with example codes that works.
c8337b530b7ed421b7ee56a109f83414
{ "intermediate": 0.4003104567527771, "beginner": 0.2581203579902649, "expert": 0.3415692150592804 }
37,662
Please tell me how are these tools used with golang: reflect, mod, goose, httprouter . Please provide typical use cases and some examples with descriptions and also links to best resources on the internet to learn more about how golang is used with each of these instruments. Thanks!
da5a51a48ea6dd86bc5c2c3fc93ab24c
{ "intermediate": 0.6000447869300842, "beginner": 0.16626600921154022, "expert": 0.23368924856185913 }
37,663
Please assume the role of a unity developer specialising in unity 2020.2.6f1 and visual studio 2017. I am going to share with you some scripts relating to ramp detection in a hoverboard game. It should be noted there may be references to being grounded which should not happen due to the hoverboard always being a set height hovering in the air. The issue is when detecting a ramp, the board just goes up it without adjusting its x rotation to match with the ramps angle. Script 1: using UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header("Hover Settings")] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header("Movement Settings")] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header("UI & Scoring")] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header("Boost Settings")] public float boostMultiplier = 2.0f; [Header("Rotation Settings")] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header("Timer Settings")] public float timer = 5.0f; [Header("Miscellaneous")] public bool moving; public GameObject board; [Header("Input Settings")] public KeyCode moveForwardKey = KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header("Customizable Keys")] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float 
currentTurn; Rigidbody rb; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool isSteeringLeft; private bool isSteeringRight; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); trickManager = GetComponent<TrickManager>(); grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } // Add the following method to set the hover height public void SetHoverHeight(float height) { hoverHeight = height; } void Start() { layerMask = 1 << LayerMask.NameToLayer("Characters"); layerMask = ~layerMask; if (scoreText == null) { GameObject canvas = GameObject.Find("Canvas"); if (canvas != null) { scoreText = Instantiate(Resources.Load("ScoreText", typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); 
} else { Debug.LogError("Canvas not found. Make sure there is a Canvas GameObject in the scene."); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); } void Update() { GetSteeringInput(); currentThrust = 0.0f; // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 scoreText.text = currentScore + " Points"; if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } Vector3 currentRotation = transform.rotation.eulerAngles; currentRotation.x = Mathf.Clamp(currentRotation.x, minRotation, maxRotation); currentRotation.z = Mathf.Clamp(currentRotation.z, minRotation, maxRotation); transform.rotation = Quaternion.Euler(currentRotation); float aclAxis = Input.GetAxis("Vertical"); if (aclAxis > 0.1f) currentThrust = aclAxis * forwardAcceleration; else if (aclAxis < -0.1f) currentThrust = aclAxis * backwardAcceleration; // Remove the following block of code related to currentTurn // if (Mathf.Abs(currentTurn) > 0.1f) // { // if (isSteeringLeft) // currentTurn = -turnStrength; // else if (isSteeringRight) // currentTurn = turnStrength; // 
} // currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Remove this line // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); ApplyGrinding(); } void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding SetHoverHeight(hoverHeight * 0.5f); // Adjust the hover height during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding 
SetHoverHeight(hoverHeight); // Reset the hover height when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } } Please wait before responding, I have more scripts I wish to share with you.
79cf3b45bdbb2bd3204f18af72d843eb
{ "intermediate": 0.33342576026916504, "beginner": 0.45085057616233826, "expert": 0.21572363376617432 }
37,664
I am facing issue like when selecting the parent node all the child node which are targeting to respective parent node has to be selected automatically, and if the parent node is not expanded, also expand it. The control is sap.m.tree and the multiselection is on. How it can possible ?
4b30543fc2e3ba0b326b10046e776c9e
{ "intermediate": 0.38225165009498596, "beginner": 0.16666440665721893, "expert": 0.4510839581489563 }
37,665
Оптимизируй код:
574eddc4337684ba193180fd8f1a8012
{ "intermediate": 0.31547054648399353, "beginner": 0.26313963532447815, "expert": 0.4213898479938507 }
37,666
I have this code that uses draft-js and markdown-draft-js libraries. I want you to analyze it and tell me what it does. const addPastedContent = (input: any, state: EditorState): void => { const inputLength = state.getCurrentContent().getPlainText().length; const remainingLength = MAX_MESSAGE_LENGTH - inputLength; const converted = convertFromRaw( markdownToDraft(input.slice(0, remainingLength), { remarkableOptions: { disable: { inline: ['links', 'emphasis'], block: ['heading'], }, }, }), ); const newState = Modifier.replaceWithFragment( state.getCurrentContent(), state.getSelection(), converted.getBlockMap(), ); const newEditorState = EditorState.push( editorState, newState, 'insert-characters', ); handleChange(EditorState.forceSelection(newEditorState, newEditorState.getSelection())); };
361bb4ff80ac24eabddf4817ded353b5
{ "intermediate": 0.6118484139442444, "beginner": 0.2145070731639862, "expert": 0.1736445426940918 }
37,667
how to apartly remove a text with regular expression in exel
a1ff91481c9fbc92027de44bbaea48c0
{ "intermediate": 0.31155866384506226, "beginner": 0.2760995626449585, "expert": 0.41234177350997925 }
37,668
Please assume the role of a unity developer specialising in unity 2020.2.6f1 and visual studio 2017. This is script 1 hoverboard ramp detection script. Since applying it the script seems to believe everything is a surface to attach to like a ramp. It throws you wildly around if you accidentally touch something not Tagged Ramp: Script 1 using System.Collections; using UnityEngine; public class RampDetection : MonoBehaviour { [Header("Ramp Detection Settings")] [SerializeField] private Transform[] rampDetectionPoints; [SerializeField] private float detectionRadius = 2f; [SerializeField] private float minRampAngle = 80f; [SerializeField] private float maxRampAngle = 100f; [SerializeField] private float rotationSpeed = 5f; // Speed of rotation adjustment private HoverBoardControl hoverBoardControl; private Rigidbody rb; private void Awake() { hoverBoardControl = GetComponentInParent<HoverBoardControl>(); rb = GetComponentInParent<Rigidbody>(); if (rampDetectionPoints == null || rampDetectionPoints.Length == 0) { Debug.LogError("RampDetectionPoints have not been assigned in the Inspector", this); } } private void FixedUpdate() { foreach (var detectionPoint in rampDetectionPoints) { DetectRamp(detectionPoint); } } private void DetectRamp(Transform detectionPoint) { if (detectionPoint != null) { RaycastHit hit; if (Physics.Raycast(detectionPoint.position, -Vector3.up, out hit, detectionRadius)) { if (hit.collider.gameObject.layer == LayerMask.NameToLayer("Ramp")) { Vector3 rampNormalProjection = Vector3.ProjectOnPlane(hit.normal, Vector3.up); float rampAngle = Vector3.Angle(detectionPoint.forward, rampNormalProjection); if (rampAngle >= minRampAngle && rampAngle <= maxRampAngle) { AdjustHoverboard(hit, detectionPoint.name); } } } } } private void AdjustHoverboard(RaycastHit hit, string hitPart) { Debug.Log(hitPart + " detected a ramp collision"); // Calculate the target alignment of the hoverboard based on the ramp normal Vector3 forwardDirection = 
Vector3.Cross(hit.normal, Vector3.Cross(Vector3.up, hit.normal)).normalized; Quaternion targetRotation = Quaternion.LookRotation(forwardDirection, hit.normal); // Smoothly lerp the hoverboard’s rotation to the target rotation StartCoroutine(SmoothRotateOverTime(rb.transform, targetRotation, rotationSpeed)); // Set IsGrounded to true to simulate physics behavior while on the ramp hoverBoardControl.IsGrounded = true; // Adjust hover height based on ramp collision float hoverHeight = Mathf.Max(hit.distance, hoverBoardControl.hoverHeight); hoverBoardControl.SetHoverHeight(hoverHeight); } private IEnumerator SmoothRotateOverTime(Transform transform, Quaternion targetRotation, float rotateSpeed) { while (Quaternion.Angle(transform.rotation, targetRotation) > 0.1f) { transform.rotation = Quaternion.Slerp(transform.rotation, targetRotation, rotateSpeed * Time.deltaTime); yield return new WaitForSeconds(Time.fixedDeltaTime); // Wait for next physics update } } } Script 2: using UnityEngine; public class Ramp : MonoBehaviour { [Header("Ramp Transforms")] [SerializeField] private Transform[] topTransforms; // Top points of the ramp [SerializeField] private Transform[] bottomTransforms; // Bottom points of the ramp [Header("Ramp Properties")] [SerializeField] private float rampBoost = 1.0f; // Boost multiplier for going uphill [SerializeField] private float downhillBoost = 1.5f; // Speed multiplier for going downhill [SerializeField] private bool allowRollDownhill = true; // Allow the board to roll downhill // The angle of the ramp, editable in the inspector but also calculated at start [SerializeField, Range(0, 90)] private float rampAngle = 30f; private void Start() { CalculateRampAngle(); } // This method calculates the ramp angle based on the provided top and bottom transforms private void CalculateRampAngle() { if (topTransforms.Length == 0 || bottomTransforms.Length == 0) { Debug.LogWarning("TopTransforms and / or BottomTransforms not set on Ramp script."); return; } // 
Example calculation for a simple ramp // This should be adjusted based on your specific ramp geometry Vector3 rampDirection = (topTransforms[0].position - bottomTransforms[0].position).normalized; rampAngle = Vector3.Angle(rampDirection, Vector3.up); } public float GetRampBoost() { return rampBoost; } public float GetDownhillBoost() { return downhillBoost; } public bool ShouldAllowRollDownhill() { return allowRollDownhill; } public float GetRampAngle() { return rampAngle; } } Please wait for my query before responding.
6fc1453c5eb8abb096ef906068de1e3f
{ "intermediate": 0.3320530652999878, "beginner": 0.4040016829967499, "expert": 0.2639452815055847 }
37,669
Please assume the role of a unity developer specialising in unity 2020.2.6f1 and visual studio 2017. I seperated a script earlier now I want to merge them together again. Script 1 is the main script that I want all the logic and functionality to replace the relevant parts of the script related to ramp detection. Script 1: using UnityEngine; public class GrindDetection : MonoBehaviour { [Header("Rail Settings")] [SerializeField] private float railDetectionRadius = 2f; private Rail rail; private HoverBoardControl hoverBoardControl; [Header("Trick Colliders")] [SerializeField] private Transform frontTruckTransform; [SerializeField] private Transform backTruckTransform; [SerializeField] private Transform boardMiddleTransform; [SerializeField] private Transform noseTransform; [SerializeField] private Transform tailTransform; private GrindTrigger frontTruck => frontTruckTransform.GetComponent<GrindTrigger>(); private GrindTrigger backTruck => backTruckTransform.GetComponent<GrindTrigger>(); private GrindTrigger boardMiddle => boardMiddleTransform.GetComponent<GrindTrigger>(); private GrindTrigger nose => noseTransform.GetComponent<GrindTrigger>(); private GrindTrigger tail => tailTransform.GetComponent<GrindTrigger>(); [Header("Settings")] [SerializeField] private float grindControlSensitivity = 1f; [SerializeField] private float grindSpeed = 50f; [Header("Input Settings")] [SerializeField] private KeyCode grindKey = KeyCode.G; [Header("Ramp Detection Settings")] [SerializeField] private float minRampAngle = 80f; [SerializeField] private float maxRampAngle = 100f; private Collider[] railColliders; private Collider railCollider; private Rigidbody rb; private float sidewaysAngle; private float frontsideBacksideAngle; private bool railTravelBackwards; private bool isGrindingOrSliding; private void Awake() { hoverBoardControl = GetComponentInParent<HoverBoardControl>(); rb = GetComponentInParent<Rigidbody>(); } private void FixedUpdate() { GetRail(); GrindSlideCheck(); 
GetRotationRelativeToCamera(); RampDetection(); } private void GetRail() { railColliders = Physics.OverlapSphere(rb.position, railDetectionRadius, LayerMask.GetMask("Rail")); if (railColliders.Length != 0) { railCollider = railColliders[0]; rail = railCollider.GetComponent<Rail>(); } if (!isGrindingOrSliding && hoverBoardControl.IsGrounded) { railColliders = null; } } private void GetRotationRelativeToCamera() { Vector3 forward = hoverBoardControl.transform.forward; Vector3 forward2 = rb.transform.forward; sidewaysAngle = Vector3.Dot(forward, forward2); frontsideBacksideAngle = Vector3.Dot(forward, rb.transform.right); } private void GrindSlideCheck() { if (railCollider == null) { return; } float num = Vector3.Dot(hoverBoardControl.transform.forward, rail.transform.forward); if (num > 0f) { railTravelBackwards = false; } else if (num < 0f) { railTravelBackwards = true; } bool boardSlideCheck = boardMiddle.Trigger; bool frontTruckGrindCheck = frontTruck.Trigger; bool backTruckGrindCheck = backTruck.Trigger; bool noseSlideCheck = nose.Trigger; bool tailSlideCheck = tail.Trigger; if (frontTruckGrindCheck || backTruckGrindCheck) { if (sidewaysAngle > 0.3f || sidewaysAngle < -0.3f) { rb.isKinematic = true; hoverBoardControl.IsGrounded = false; hoverBoardControl.GrindAudioVolumeLerp(1f, 20f); float num2 = hoverBoardControl.BoardTilt.x * grindControlSensitivity; float num3 = hoverBoardControl.BoardTilt.y * grindControlSensitivity; float num4 = 50f; if (railTravelBackwards) { num3 = -num3; } if (backTruckGrindCheck && frontTruckGrindCheck) { rb.transform.RotateAround(backTruckTransform.position, new Vector3(0f, 1f, 0f), num2 * Time.deltaTime); rb.transform.RotateAround(backTruckTransform.position, rail.transform.right, num3 * Time.deltaTime); } if (backTruckGrindCheck) { RotateWithSteering(backTruckTransform, num2, num3, num4); } else if (frontTruckGrindCheck) { RotateWithSteering(frontTruckTransform, num2, num3, num4); } } } else if (tailSlideCheck || noseSlideCheck) { 
rb.isKinematic = true; hoverBoardControl.IsGrounded = false; hoverBoardControl.GrindAudioVolumeLerp(0f, 20f); float num5 = 50f; float num6 = 5f; rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, Quaternion.Euler(new Vector3(rb.transform.localEulerAngles.x, rb.transform.localEulerAngles.y, 0f)), num6 * Time.deltaTime); float num7 = hoverBoardControl.BoardTilt.y * grindControlSensitivity; if (railTravelBackwards) { num7 = -num7; } if (noseSlideCheck) { RotateWithSteering(noseTransform, 0f, num7, num5); } else if (tailSlideCheck) { RotateWithSteering(tailTransform, 0f, num7, num5); } } else if (boardSlideCheck) { rb.isKinematic = true; hoverBoardControl.IsGrounded = false; hoverBoardControl.GrindAudioVolumeLerp(1f, 20f); float num8 = -rail.transform.localEulerAngles.x; if (frontsideBacksideAngle < -0.3) { num8 = -num8; } if (railTravelBackwards) { num8 = -num8; } rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, Quaternion.Euler(new Vector3(num8, rb.transform.localEulerAngles.y, 0f)), 5f * Time.deltaTime); float num9 = 50f; if (railTravelBackwards) { num9 = -num9; } rb.transform.Rotate(Vector3.up * grindSpeed * Time.deltaTime, Space.World); Vector3 position = rb.position; float num10 = 0.1f; position = Vector3.Lerp(position, railCollider.ClosestPoint(position), num10); rb.position = position; hoverBoardControl.transform.localRotation = rb.transform.localRotation; hoverBoardControl.transform.rotation = rb.transform.rotation; if (boardMiddle.Trigger) { StartGrind(); // Add this line to start the grind } } else { rb.isKinematic = true; hoverBoardControl.IsGrounded = false; hoverBoardControl.GrindAudioVolumeLerp(0f, 20f); Vector3 position2 = rb.position; position2 = Vector3.Lerp(position2, railCollider.ClosestPoint(position2), 0.1f * Time.deltaTime); rb.position = position2; rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, Quaternion.Euler(0f, rb.transform.eulerAngles.y, 0f), 20f * Time.deltaTime); 
hoverBoardControl.transform.localRotation = rb.transform.localRotation; hoverBoardControl.transform.rotation = rb.transform.rotation; if (tail.Trigger || nose.Trigger) { // Adjust rotations and movements as needed } } } private void RotateWithSteering(Transform truckTransform, float num2, float num3, float num4) { rb.transform.RotateAround(truckTransform.position, new Vector3(0f, 1f, 0f), num2 * Time.deltaTime); rb.transform.RotateAround(truckTransform.position, rail.transform.right, num3 * Time.deltaTime); if (hoverBoardControl.IsSteeringLeft) { rb.transform.RotateAround(truckTransform.position, new Vector3(0f, 1f, 0f), (0f - num4) * Time.deltaTime); } else if (hoverBoardControl.IsSteeringRight) { rb.transform.RotateAround(truckTransform.position, new Vector3(0f, 1f, 0f), num4 * Time.deltaTime); } } private void RampDetection() { if (railCollider == null) { return; } RaycastHit hit; if (Physics.Raycast(rail.transform.position, rail.transform.TransformDirection(Vector3.forward), out hit, railDetectionRadius)) { float num = Vector3.Dot(hoverBoardControl.transform.forward, rail.transform.forward); if (num > 0f && hit.collider.gameObject.layer == LayerMask.NameToLayer("Ramp")) { Vector3 vector = Vector3.ProjectOnPlane(hit.normal, Vector3.up); float num2 = Vector3.Angle(vector, hoverBoardControl.transform.forward); if (num2 > minRampAngle && num2 < maxRampAngle) { hoverBoardControl.IsGrounded = true; rb.isKinematic = false; // Apply rotation to the hoverboard based on the ramp angle float angleDifference = Mathf.DeltaAngle(hoverBoardControl.transform.rotation.eulerAngles.x, num2); Quaternion newRotation = Quaternion.Euler(num2, rb.transform.eulerAngles.y, 0f); rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, newRotation, 5f * Time.deltaTime); } } } } public bool IsGrinding() { return isGrindingOrSliding; } public KeyCode GrindKey { get { return grindKey; } } // Add the StartGrind method public void StartGrind() { isGrindingOrSliding = true; } } Script 2: 
using System.Collections; using UnityEngine; public class RampDetection : MonoBehaviour { [Header("Ramp Detection Settings")] [SerializeField] private Transform[] rampDetectionPoints; [SerializeField] private float detectionRadius = 2f; [SerializeField] private float minRampAngle = 80f; [SerializeField] private float maxRampAngle = 100f; [SerializeField] private float rotationSpeed = 5f; // Speed of rotation adjustment private HoverBoardControl hoverBoardControl; private Rigidbody rb; private void Awake() { hoverBoardControl = GetComponentInParent<HoverBoardControl>(); rb = GetComponentInParent<Rigidbody>(); if (rampDetectionPoints == null || rampDetectionPoints.Length == 0) { Debug.LogError("RampDetectionPoints have not been assigned in the Inspector", this); } } private void FixedUpdate() { foreach (var detectionPoint in rampDetectionPoints) { DetectRamp(detectionPoint); } } private void DetectRamp(Transform detectionPoint) { if (detectionPoint != null) { RaycastHit hit; if (Physics.Raycast(detectionPoint.position, -Vector3.up, out hit, detectionRadius)) { if (hit.collider.gameObject.layer == LayerMask.NameToLayer("Ramp")) { Vector3 rampNormalProjection = Vector3.ProjectOnPlane(hit.normal, Vector3.up); float rampAngle = Vector3.Angle(detectionPoint.forward, rampNormalProjection); if (rampAngle >= minRampAngle && rampAngle <= maxRampAngle) { AdjustHoverboard(hit, detectionPoint.name); } } } } } private void AdjustHoverboard(RaycastHit hit, string hitPart) { Debug.Log(hitPart + " detected a ramp collision"); // Calculate the target alignment of the hoverboard based on the ramp normal Vector3 forwardDirection = Vector3.Cross(hit.normal, Vector3.Cross(Vector3.up, hit.normal)).normalized; Quaternion targetRotation = Quaternion.LookRotation(forwardDirection, hit.normal); // Smoothly lerp the hoverboard’s rotation to the target rotation StartCoroutine(SmoothRotateOverTime(rb.transform, targetRotation, rotationSpeed)); // Set IsGrounded to true to simulate physics 
behavior while on the ramp hoverBoardControl.IsGrounded = true; // Adjust hover height based on ramp collision float hoverHeight = Mathf.Max(hit.distance, hoverBoardControl.hoverHeight); hoverBoardControl.SetHoverHeight(hoverHeight); } private IEnumerator SmoothRotateOverTime(Transform transform, Quaternion targetRotation, float rotateSpeed) { while (Quaternion.Angle(transform.rotation, targetRotation) > 0.1f) { transform.rotation = Quaternion.Slerp(transform.rotation, targetRotation, rotateSpeed * Time.deltaTime); yield return new WaitForSeconds(Time.fixedDeltaTime); // Wait for next physics update } } }
323596c97e2e6862396fed0ea68079b5
{ "intermediate": 0.3147437572479248, "beginner": 0.44400453567504883, "expert": 0.24125172197818756 }
37,670
!pip install openai
579dacbe69e8a521c2a535cbabd83c86
{ "intermediate": 0.4070722162723541, "beginner": 0.14506134390830994, "expert": 0.4478664696216583 }
37,671
In my python FastAPI backend code, I have a crud function for adding images:
db548f8fba209a61b48472a948097f50
{ "intermediate": 0.5812699794769287, "beginner": 0.21541538834571838, "expert": 0.20331455767154694 }
37,672
I am getting these two warnings:
b310f012f33f60f6a56b0b6531297933
{ "intermediate": 0.30478665232658386, "beginner": 0.2624671459197998, "expert": 0.4327462315559387 }
37,673
how to create a RESTful API. please provide explanation for beginners, examples and links to best resources to learn from scratch
cf1d53f59f51185e10f357a26cc4575c
{ "intermediate": 0.839635968208313, "beginner": 0.08986936509609222, "expert": 0.07049473375082016 }
37,674
Instructions: The project focuses on the use of FMCW radar. It is used for data collection, point detection, tracking and other functions. The project is broken down into several modules. The aim of this division is to decouple as much as possible the functionalities present in each module, and make the development of new functionalities more flexible and rapid. Some information will be presented to you in json format: - module: the name of the module - module_structure: the structure of the module (the files making up the module and their hierarchy) - module_files_already_generated_doc: the documentation of the module's files if already generated - other_modules_doc: the documentation of the other modules on which the module depends (possibly none at all) - gen_doc_of_file: the file you will be given to generate the documentation of. If no file is given, the documentation of the entire module must be generated. Your goal is to create a markdown documentation of the file you will be given, or the entire module if no file is given. This documentation is intended to guide a developer who is new to the project, you can therefore add whatever you feel is relevant for this task. 
Informations: {"module": "radar_area_detection", "module_structure": "radar_area_detection/detection.py;radar_area_detection/display.py;radar_area_detection/main.py;radar_area_detection\\comm/camera_alert.py;radar_area_detection\\comm/rabbitmq.py;radar_area_detection\\conf/6432_ti_config.cfg;radar_area_detection\\conf/6843_ti_config.cfg;radar_area_detection\\conf/detection_conf.json;radar_area_detection\\conf/main.json;radar_area_detection\\conf/rabbitmq_conf.json;radar_area_detection\\conf/tracking_conf.json;", "module_files_already_generated_doc": [{"file": "docs\\radar_area_detection\\display.md", "doc": "# radar_area_detection/display.py Documentation\n\n## Overview\n\nThe `display.py` script is part of the [radar_area_detection](radar_area_detection.md) module, which integrates with the area detection and object tracking functionalities. This script specifically provides visualization capabilities, extending the `TrackingDisplay2D` class from the radar_tracking module to represent detection areas and the status of those areas (either as \"ok\" or under \"alert\").\n\n## File Structure\n\ndisplay.py includes the following class:\n\n- `AreasDisplay2D`: Extends the `TrackingDisplay2D` class to display detection areas and denote their status on a 2D plot.\n\n### AreasDisplay2D Class\n\nThe `AreasDisplay2D` class is responsible for visualizing areas on a 2D display, differentiating between areas that are clear (ok) and those that have an active alert. It allows real-time updates of the visualization as the status of the areas changes according to the radar data.\n\n#### Constructor\n\n- **\\_\\_init__(self, *args, \\*\\*kwargs)**: Initializes the `AreasDisplay2D` class with arguments passed to its superclass and initializes an empty list for patches.\n\n#### Attributes\n\n- **patches**: List. 
Stores the drawing patches corresponding to areas on the display.\n\n#### Methods\n\n- **_prepare_draw(self, clusters, ok_areas: List[Area], alert_areas: List[Area])**: Internally prepares the drawing of the clusters and areas. It removes any existing patches and re-adds them according to the updated area states.\n- **show(self, clusters, area_detector: AreaDetector)**: Public method to display the radar clusters and areas. This method calls _prepare_draw with the ok_areas and alert_areas from an AreaDetector instance and then proceeds to draw the updated visualization.\n\n### Integration with Other Modules\n\nThe detect.py script in the [radar_area_detection](radar_area_detection.md) module interacts with the AreasDisplay2D class to visualize active areas and alerts. Moreover, the AreasDisplay2D class uses functionality from the radar_tracking module\u00e2\u20ac\u2122s visualization utilities, signifying how various modules within the system come together to fulfill the project\u00e2\u20ac\u2122s overall objectives.\n\n## Usage\n\nDevelopers can instantiate the `AreasDisplay2D` class to visualise the area detection results in the context of FMCW radar applications. This enables a graphical representation of which areas are clear (ok) and which are under alert.\n\n## Data Flow\n\n1. An instance of `AreasDisplay2D` is created with appropriate arguments.\n2. Radar data (clusters) and area statuses are processed by an `AreaDetector`.\n3. Area statuses and clusters are passed to the `AreasDisplay2D` instance\u00e2\u20ac\u2122s show method.\n4. 
The show method updates the visual representation according to the current state of detection areas and tracked clusters.\n\n## Developer Notes\n\n- Use the `AreasDisplay2D` class to create a visual feedback loop for area detection and alert visualization in real-time radar applications.\n- Ensure active areas and their statuses are correctly updated and reflected in the visualization to provide continuous situational awareness.\n- The visual representation is crucial for debugging and monitoring the radar area detection functionality, hence strive for clear visual distinction between different states for better readability and interpretation.\n\n## Conclusion\n\n`display.py` within the [radar_area_detection](radar_area_detection.md) module serves a pivotal role in visually representing the status of detection areas in real-time. The AreasDisplay2D class demonstrates an effective integration of visualization techniques with radar detection and tracking capabilities. It contributes to the wider goal of creating a modular and adaptable system for FMCW radar applications.\n\n[Radar Area Detection](radar_area_detection.md)"}, {"file": "docs\\radar_area_detection\\comm\\camera_alert.md", "doc": "# radar_area_detection/comm/camera_alert.py Documentation\n\n## Overview\n\nThe `camera_alert.py` script resides within the radar_area_detection.comm submodule of the [radar_area_detection](../radar_area_detection.md) module. 
This script defines a concrete implementation of an alert system based on camera activation, which integrates with the radar area detection system to respond to specific detection events using web requests.\n\n## File Structure\n\n`camera_alert.py` contains the following class:\n\n- `CameraAlert`: This class extends the abstract base Alert class, specifying actions to perform when an alert must be raised or cleared via camera control.\n\n### CameraAlert Class\n\nThe `CameraAlert` class is designed to send commands to a networked camera to start or stop recording based on area detection alerts.\n\n#### Constructor\n\n- `__init__(self, ip, port)`: Initializes the CameraAlert with network configurations.\n\n#### Attributes\n\n- `ip`: String. The IP address of the camera.\n- `port`: Integer. The port number to access the camera.\n\n#### Methods\n\n- `RaiseAlert`: Sends an HTTP POST request to the camera\u00e2\u20ac\u2122s server to start recording when an alert is raised.\n- `ClearAlert`: Sends an HTTP POST request to the camera\u00e2\u20ac\u2122s server to stop recording when an alert is cleared.\n\n## Integration with Other Modules\n\nCameraAlert extends the Alert class from [radar_area_detection.detection](../detection.md), linking camera control to the area detection logic using polymorphism.\n\n## Usage\n\nThe `CameraAlert` class may be instantiated and passed to AreaDetector instances within detection.py to specify that the camera should react to detection events.\n\nDevelopers can use this script as follows:\n\n1. Instantiate the `CameraAlert` with the appropriate IP and port for the camera.\n2. 
Integrate the instance with the area detection system so the camera actions are triggered during alert conditions.\n\n## Error Handling\n\nThis script catches exceptions during the HTTP request process and prints an error message, which helps to identify network or camera accessibility issues.\n\n## External Dependence\n\n`camera_alert.py` depends on the requests library to perform HTTP requests. Make sure that requests is installed and available in the project\u00e2\u20ac\u2122s environment.\n\n## Data Flow\n\n1. The area detection system determines that an alert condition is met.\n2. AreaDetector triggers the RaiseAlert method on the CameraAlert instance.\n3. CameraAlert sends an HTTP request to the camera to start or stop recording based on the state of the alert.\n\n## Developer Notes\n\nDevelopers looking to further customize behavior or add features should consider:\n\n- Expanding the CameraAlert class to handle more sophisticated network protocols or add authentication as needed.\n- Implementing retry logic or asynchronous requests if the synchronous POST request model proves to be a bottleneck or if the timeout parameter needs adjustment.\n- Adding logging mechanisms to capture successful interactions and failed attempts for better operational visibility.\n\n## Conclusion\n\nThe `camera_alert.py` script effectively bridges the gap between radar area detection and physical security measures, empowering the system to take real-world, automated actions in response to detection events. 
By following the robust design principle of extending an abstract base class, this module enhances the flexibility and scalability of the overall radar area detection mechanism.\n\n[Radar Area Detection](../radar_area_detection.md)"}, {"file": "docs\\radar_area_detection\\comm\\rabbitmq.md", "doc": "# Radar Area Detection: RabbitMQ Communication Interface\n\n## Overview\n\nThe rabbitmq.py is located within the radar_area_detection.comm submodule of the radar_area_detection module. This script facilitates communication between the radar area detection system and RabbitMQ message broker. The role of this interface is to enable the sending of JSON formatted data concerning area detection events and statuses over a network.\n\n## Features\n\n- Establishes a connection to RabbitMQ server using given host and port information.\n- Allows for the declaration of queues where messages will be posted.\n- Provides methods to send JSON data to specified queues.\n- Enables clean disconnection from the RabbitMQ server.\n\n## Class: RabbitMQSender\n\n### Constructor\n\n- __init__(self, host, port, queue, exchange)\nInitializes the RabbitMQSender instance with network and channel configurations.\n\n#### Parameters\n\n- host: String representing the RabbitMQ server\u00e2\u20ac\u2122s hostname.\n- port: Integer for the network port on which the RabbitMQ service is listening.\n- queue: The queue name where messages will be sent.\n- exchange: The exchange parameter specifies the exchange that the message should be routed through.\n\n### Methods\n\n- connect()\nInitiates a connection to the RabbitMQ server and declares a messaging queue.\n\n- send_json(self, data)\nSends JSON formatted data through the established channel to the specified queue.\n\n- disconnect()\nCloses the connection with the RabbitMQ server to release network resources.\n\n- configure_rabbitmq(rabbitmq_conf)\nStatic method that configures and returns a RabbitMQSender object if enabled in the configuration.\n\n#### 
Parameters\n\n- rabbitmq_conf: A dictionary containing RabbitMQ configuration including host, port, queue, and exchange.\n\n## Integration with Other Modules\n\nWhile not documented within the other_modules_doc, it\u00e2\u20ac\u2122s implied that this class would be used in concert with the radar_area_detection module\u00e2\u20ac\u2122s detection and alerting components. Its functionality would be particularly useful when transmitting alerts or status updates to other systems monitoring or acting based on area intrusion detection.\n\n## Usage Instructions\n\n1. Include the rabbitmq.py within your project\u00e2\u20ac\u2122s communication package.\n2. Utilize the RabbitMQSender.configure_rabbitmq() static method to provide a ready-to-use configured instance based on your RabbitMQ settings.\n3. Use the send_json method to publish messages to your RabbitMQ server as needed.\n4. Properly call disconnect to ensure graceful shutdown and cleanup of connections.\n\n## Developer Notes\n\n- Make sure RabbitMQ service is running and accessible on the specified host and port before attempting a connection.\n- Implement appropriate error handling and logging when sending messages to track any communication issues or message delivery failures.\n- Depending on the size and frequency of messages, consider potential impact on network resources and optimize the sending strategy accordingly (batching, async sending, etc.).\n- In a multi-threaded or async application, ensure thread-safety or proper synchronization when interacting with the RabbitMQ connection.\n\n## Conclusion\n\nThe rabbitmq.py script provides a straightforward and efficient means of interfacing with a RabbitMQ server to send alerts and data from the radar area detection system. 
It enables real-time communication and coordination with other systems within a larger infrastructure that relies on timely and reliable delivery of radar detection events."}, {"file": "docs\\radar_area_detection\\conf\\detection_conf.md", "doc": "# detection_conf.json Documentation\n\n## Overview\n\nThis documentation provides an overview of the configuration file `detection_conf.json` used within the [Radar Area Detection module](../radar_area_detection.md).\n\n## Parameters\n\n`detection_conf.json` example:\n\n
55489b7df11c86a2185017c14dd13173
{ "intermediate": 0.4197831153869629, "beginner": 0.3830278515815735, "expert": 0.19718897342681885 }
37,675
reinforcement learning for continuous space and continuous action using Deep Deterministic Policy Gradients in Tensorflow
151d47bebe6cc2d5533b9a0f08db608b
{ "intermediate": 0.04935908690094948, "beginner": 0.03256627172231674, "expert": 0.9180746078491211 }
37,676
What's the difference between these scripts, I know script 2 is updated version of 1 but how exactly please. Script 1 using UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header("Hover Settings")] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header("Movement Settings")] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header("UI & Scoring")] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header("Boost Settings")] public float boostMultiplier = 2.0f; [Header("Rotation Settings")] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header("Timer Settings")] public float timer = 5.0f; [Header("Miscellaneous")] public bool moving; public GameObject board; [Header("Input Settings")] public KeyCode moveForwardKey = KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header("Customizable Keys")] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float currentTurn; Rigidbody rb; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool isSteeringLeft; private bool isSteeringRight; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); 
trickManager = GetComponent<TrickManager>(); grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } void Start() { layerMask = 1 << LayerMask.NameToLayer("Characters"); layerMask = ~layerMask; if (scoreText == null) { GameObject canvas = GameObject.Find("Canvas"); if (canvas != null) { scoreText = Instantiate(Resources.Load("ScoreText", typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); } else { Debug.LogError("Canvas not found. 
Make sure there is a Canvas GameObject in the scene."); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); } void Update() { GetSteeringInput(); currentThrust = 0.0f; // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 scoreText.text = currentScore + " Points"; if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } Vector3 currentRotation = transform.rotation.eulerAngles; currentRotation.x = Mathf.Clamp(currentRotation.x, minRotation, maxRotation); currentRotation.z = Mathf.Clamp(currentRotation.z, minRotation, maxRotation); transform.rotation = Quaternion.Euler(currentRotation); float aclAxis = Input.GetAxis("Vertical"); if (aclAxis > 0.1f) currentThrust = aclAxis * forwardAcceleration; else if (aclAxis < -0.1f) currentThrust = aclAxis * backwardAcceleration; // Remove the following block of code related to currentTurn // if (Mathf.Abs(currentTurn) > 0.1f) // { // if (isSteeringLeft) // currentTurn = -turnStrength; // else if (isSteeringRight) // currentTurn = turnStrength; // } // currentTurn = Mathf.Clamp(currentTurn, 
-1.0f, 1.0f); // Remove this line // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); ApplyGrinding(); } void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } } Script 2 using 
UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header("Hover Settings")] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header("Movement Settings")] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header("UI & Scoring")] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header("Boost Settings")] public float boostMultiplier = 2.0f; [Header("Rotation Settings")] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header("Timer Settings")] public float timer = 5.0f; [Header("Miscellaneous")] public bool moving; public GameObject board; [Header("Input Settings")] public KeyCode moveForwardKey = KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header("Customizable Keys")] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float currentTurn; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool isSteeringLeft; private bool isSteeringRight; private Rigidbody rb; private bool isOnRamp; private Quaternion targetRampRotation; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); trickManager = GetComponent<TrickManager>(); 
grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } // Add the following method to set the hover height public void SetHoverHeight(float height) { hoverHeight = height; } void Start() { layerMask = 1 << LayerMask.NameToLayer("Characters"); layerMask = ~layerMask; if (scoreText == null) { GameObject canvas = GameObject.Find("Canvas"); if (canvas != null) { scoreText = Instantiate(Resources.Load("ScoreText", typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); } else { Debug.LogError("Canvas not found. 
Make sure there is a Canvas GameObject in the scene."); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); float aclAxis = Input.GetAxis("Vertical"); currentThrust = aclAxis * ((aclAxis > 0) ? forwardAcceleration : backwardAcceleration); } void Update() { GetSteeringInput(); // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); // New code to align with ramp when on it if (grindDetection.IsGrinding() && isOnRamp) { // Align to the ramp’s angle AlignToRamp(); } ApplyGrinding(); } // New method to align the board with the ramp’s angle private void AlignToRamp() { rb.rotation = Quaternion.Slerp(rb.rotation, targetRampRotation, Time.deltaTime * 5f); // Note: ‘5f’ is a lerp speed factor that you can adjust as needed or expose as a public variable. 
} void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding SetHoverHeight(hoverHeight * 0.5f); // Adjust the hover height during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding // Reset the hover height when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } }
1209d56beddc6022d1b2cb56dd517cc3
{ "intermediate": 0.35823220014572144, "beginner": 0.4207817614078522, "expert": 0.22098605334758759 }
37,677
Instructions: The project focuses on the use of FMCW radar. It is used for data collection, point detection, tracking and other functions. The project is broken down into several modules. The aim of this division is to decouple as much as possible the functionalities present in each module, and make the development of new functionalities more flexible and rapid. Some information will be presented to you in json format: - module: the name of the module - module_structure: the structure of the module (the files making up the module and their hierarchy) - module_files_already_generated_doc: the documentation of the module's files if already generated - other_modules_doc: the documentation of the other modules on which the module depends (possibly none at all) - gen_doc_of_file: the file you will be given to generate the documentation of. If no file is given, the documentation of the entire module must be generated. Your goal is to create a markdown documentation of the file you will be given, or the entire module if no file is given. This documentation is intended to guide a developer who is new to the project, you can therefore add whatever you feel is relevant for this task. 
Informations: {"module": "radar_area_detection", "module_structure": "radar_area_detection/detection.py;radar_area_detection/display.py;radar_area_detection/main.py;radar_area_detection\\comm/camera_alert.py;radar_area_detection\\comm/rabbitmq.py;radar_area_detection\\conf/6432_ti_config.cfg;radar_area_detection\\conf/6843_ti_config.cfg;radar_area_detection\\conf/detection_conf.json;radar_area_detection\\conf/main.json;radar_area_detection\\conf/rabbitmq_conf.json;radar_area_detection\\conf/tracking_conf.json;", "module_files_already_generated_doc": [{"file": "docs\\radar_area_detection\\detection.md", "doc": "# radar_area_detection/detection.py Documentation\n\n## Overview\n\nThe `detection.py` script role is to define the mechanisms for detecting if tracked objects are within configured areas and whether these instances should raise alerts.\n\n## File Structure\n\n`detection.py` consists of several classes and a data class:\n\n- `Area`: A data class that represents a geometric area in which detection is performed.\n- `Alert`: An abstract base class that outlines the required functions for alert mechanisms.\n- `AreaDetector`: A class that is responsible for detecting whether clusters fall within the defined areas and for managing the states of these areas.\n- `Profile`: A data class to handle the properties of detection profiles, which may include differentiating characteristics of targets.\n- `ProfileAreaDetector`: Specialized class that handles the assignment of clusters to profiles and updates the corresponding area detectors per profile.\n\n### Area Class\n\nThe `Area` data class defines a rectangle by its left (xl), right (xr), bottom (yb), and top (yt) boundaries, and provides a serialization method for data export and a method for generating a hash value based on its boundaries.\n\n### Alert Class\n\n`Alert` serves as a template for concrete alert classes that inherit from it. 
It requires subclasses to implement the RaiseAlert and ClearAlert methods to define how an alert should be triggered and cleared, respectively.\n\n### AreaDetector Class\n\n`AreaDetector` provides methods for initializing detection areas and updating their states based on the input from tracked clusters. It evaluates whether clusters are within any defined area and updates the status of areas accordingly. It is capable of managing ok, suspected, and alert areas, and it invokes alert mechanisms if necessary.\n\n### Profile Class\n\n`Profile` encapsulates different detection profiles that may be used for different types of targets. Each profile has a name, average measurements of the surface and height, weights to these measurements, an alert delay time, and an alert mechanism.\n\n### ProfileAreaDetector Class\n\n`ProfileAreaDetector` manages multiple detection profiles and their corresponding `AreaDetector` instances. It updates the detectors based on profiles that clusters are filtered into, ensuring that the correct detection logic is applied for different target types.\n\n## Integration with Other Modules\n\nThe `detection.py` script in the [radar_area_detection](radar_area_detection.md) module works in coordination with the [radar_tracking](../radar_tracking/radar_tracking.md) module (tracks objects detected by the radar) and may utilize the alerting infrastructure provided by the `radar_area_detection\\comm` submodule (for example, sending messages via [RabbitMQ](comm/rabbitmq.md)).\n\n## Alert Handling and Data Flow\n\nHere is a high-level overview of the data flow related to the detection process:\n\n1. Radar data is processed into clusters of points representing detected objects.\n2. `ProfileAreaDetector` filters clusters into appropriate profiles.\n3. `AreaDetector` detects clusters within designated areas per profile.\n4. Upon detecting suspicious activity, a delay timer is initiated before raising an alert.\n5. 
If a cluster remains in a suspicious area past a certain delay, an alert is triggered through the implemented `Alert` subclass.\n6. If no suspicious activity is detected, the areas are marked as \"ok\" and any active alerts are cleared.\n\n## Developer Notes\n\n- Alert delay times should be carefully selected based on the sensitivity required and false positive reduction needs.\n- Serialization methods in data classes provide an easy means for data to be exported or logged, aiding debugging or analytics processes.\n\n## Conclusion\n\nThe detection.py script within the radar_area_detection module is central to ensuring the safety and security of monitored areas using FMCW radar technology. It demonstrates a flexible and extensible approach to area detection by employing abstract classes, allowing for customizable alert mechanisms and adaptability for various target profiles. The integration of this script within the radar application plays a significant role in translating raw radar data into actionable alerts and area status updates.\n\n[Radar Area Detection](radar_area_detection.md)"}, {"file": "docs\\radar_area_detection\\display.md", "doc": "# radar_area_detection/display.py Documentation\n\n## Overview\n\nThe `display.py` script is part of the [radar_area_detection](radar_area_detection.md) module, which integrates with the area detection and object tracking functionalities. 
This script specifically provides visualization capabilities, extending the `TrackingDisplay2D` class from the radar_tracking module to represent detection areas and the status of those areas (either as \"ok\" or under \"alert\").\n\n## File Structure\n\ndisplay.py includes the following class:\n\n- `AreasDisplay2D`: Extends the `TrackingDisplay2D` class to display detection areas and denote their status on a 2D plot.\n\n### AreasDisplay2D Class\n\nThe `AreasDisplay2D` class is responsible for visualizing areas on a 2D display, differentiating between areas that are clear (ok) and those that have an active alert. It allows real-time updates of the visualization as the status of the areas changes according to the radar data.\n\n#### Constructor\n\n- **\\_\\_init__(self, *args, \\*\\*kwargs)**: Initializes the `AreasDisplay2D` class with arguments passed to its superclass and initializes an empty list for patches.\n\n#### Attributes\n\n- **patches**: List. Stores the drawing patches corresponding to areas on the display.\n\n#### Methods\n\n- **_prepare_draw(self, clusters, ok_areas: List[Area], alert_areas: List[Area])**: Internally prepares the drawing of the clusters and areas. It removes any existing patches and re-adds them according to the updated area states.\n- **show(self, clusters, area_detector: AreaDetector)**: Public method to display the radar clusters and areas. This method calls _prepare_draw with the ok_areas and alert_areas from an AreaDetector instance and then proceeds to draw the updated visualization.\n\n### Integration with Other Modules\n\nThe detect.py script in the [radar_area_detection](radar_area_detection.md) module interacts with the AreasDisplay2D class to visualize active areas and alerts. 
Moreover, the AreasDisplay2D class uses functionality from the radar_tracking module\u00e2\u20ac\u2122s visualization utilities, signifying how various modules within the system come together to fulfill the project\u00e2\u20ac\u2122s overall objectives.\n\n## Usage\n\nDevelopers can instantiate the `AreasDisplay2D` class to visualise the area detection results in the context of FMCW radar applications. This enables a graphical representation of which areas are clear (ok) and which are under alert.\n\n## Data Flow\n\n1. An instance of `AreasDisplay2D` is created with appropriate arguments.\n2. Radar data (clusters) and area statuses are processed by an `AreaDetector`.\n3. Area statuses and clusters are passed to the `AreasDisplay2D` instance\u00e2\u20ac\u2122s show method.\n4. The show method updates the visual representation according to the current state of detection areas and tracked clusters.\n\n## Developer Notes\n\n- Use the `AreasDisplay2D` class to create a visual feedback loop for area detection and alert visualization in real-time radar applications.\n- Ensure active areas and their statuses are correctly updated and reflected in the visualization to provide continuous situational awareness.\n- The visual representation is crucial for debugging and monitoring the radar area detection functionality, hence strive for clear visual distinction between different states for better readability and interpretation.\n\n## Conclusion\n\n`display.py` within the [radar_area_detection](radar_area_detection.md) module serves a pivotal role in visually representing the status of detection areas in real-time. The AreasDisplay2D class demonstrates an effective integration of visualization techniques with radar detection and tracking capabilities. 
It contributes to the wider goal of creating a modular and adaptable system for FMCW radar applications.\n\n[Radar Area Detection](radar_area_detection.md)"}, {"file": "docs\\radar_area_detection\\main.md", "doc": "# radar_area_detection/main.py Documentation\n\n## Overview\n\nThe `main.py` script is part of the [radar_area_detection](radar_area_detection.md) module. It orchestrates live area detection leveraging data from mmWave radar hardware. Using configuration files, this script setups the radar system, executes detection and tracking algorithms, and detects intrusions within predefined areas. If configured, it sends alert notifications and visualizes the detection areas.\n\n## Usage\n\nThis script should be run with a parameter to indicate the configuration file to use:\n\n
f30c7e83ee39c18b74e44dc81bfa857c
{ "intermediate": 0.3568703234195709, "beginner": 0.39521145820617676, "expert": 0.2479182332754135 }
37,678
reinforcement learning for continuous space and continuous action using Deep Deterministic Policy Gradients in Tensorflow and how to train it and how to save trained model and use it again
eb56c77b8f2299071d3ae92c7c89d875
{ "intermediate": 0.027576083317399025, "beginner": 0.01539251022040844, "expert": 0.9570313692092896 }
37,679
reinforcement learning for continuous space and continuous action using Deep Deterministic Policy Gradients in Tensorflow and how to train it and how to save trained model and use it again for performance test, explain in detail with code
021db525de6b7319a8a09a2a98126d36
{ "intermediate": 0.030326852574944496, "beginner": 0.017838463187217712, "expert": 0.9518346786499023 }
37,680
in context of Revit MEP clash detection , reinforcement learning for continuous space and continuous action using Deep Deterministic Policy Gradients in Tensorflow and how to train it and how to save trained model and use it again for performance test, explain in detail with code
3876067fb2a9eceb58ee5487782f9222
{ "intermediate": 0.0526047982275486, "beginner": 0.017524955794215202, "expert": 0.9298702478408813 }
37,681
what materials should i use for my dt projects
2d4b84db99dc3b28e925c8173cb706ea
{ "intermediate": 0.5645909905433655, "beginner": 0.20885908603668213, "expert": 0.2265499085187912 }
37,682
can u explain me what does this function do: const getValidationConsumption = async (req, res) => { try { const company = req.company const { ValidationConsumption, Validation } = req.app.models const estTime = moment().tz('Canada/Eastern') const isWeekdayRange = estTime.isoWeekday() >= 1 && estTime.isoWeekday() <= 5 const dayOfWeekCondition = isWeekdayRange ? [1, 2, 3, 4, 5] : [1, 7] const validationCount = await ValidationConsumption.count({ where: { companyId: company.id, deletedAt: null, [Op.and]: [literal(`EXTRACT(DOW FROM "ValidationConsumption"."createdAt") IN (${dayOfWeekCondition.join(',')})`)], }, include: [{ model: Validation, where: { throughBulk: true } }], }) const remainigValidations = 150 - validationCount return res.status(200).json({ remainigValidations }) } catch (err) { console.error('Error fetching validation consumption: ', err) res.status(500).json({ message: 'Internal Server Error' }) } }
ab65f6d0c65ce0a81b52d3d0d8e54fa1
{ "intermediate": 0.6018986701965332, "beginner": 0.29380735754966736, "expert": 0.10429400950670242 }
37,683
how to add method having arguments in python class after object creation
6417724b60c6a0dd48a6341632c7d8bc
{ "intermediate": 0.37100520730018616, "beginner": 0.4418114423751831, "expert": 0.18718333542346954 }
37,684
send request body in get http angular
10cd2c1902b1ebd4b07781c2360b73fa
{ "intermediate": 0.3963950276374817, "beginner": 0.21752528846263885, "expert": 0.38607969880104065 }
37,685
J'ai ce fichier php : <?php namespace Form; class Question { public function __construct( protected string $uuid, protected string $name, protected string $type, protected string $text, protected array $choices = [], protected string $answer, protected int $score, ) { } public function getUuid(): string { return $this->uuid; } public function getName(): string { return $this->name; } public function getType(): string { return $this->type; } public function getText(): string { return $this->text; } public function getChoices(): array { return $this->choices; } public function getAnswer(): string { return $this->answer; } public function getScore(): int { return $this->score; } } ?> pourquoi j'ai ce problème ? : Deprecated: Optional parameter $choices declared before required parameter $score is implicitly treated as a required parameter in C:\Users\amael\OneDrive\Bureau\cours\php\QUIZZ_PHP\TP_QUIZZ\Classes\Form\Question.php on line 7
62526cf268dd22b18faf60120fc59a92
{ "intermediate": 0.2578810453414917, "beginner": 0.47966909408569336, "expert": 0.26244986057281494 }
37,686
Assume your a unity 3d expert and visual studio. great. What are the major differences in control between script 1 and 2. Script 1 has a stable hoverboard and can grind really well. The changes made by the script 2 update prevent any kind of grinding and make the board aCT IN VERY BIZARRE WAYS LIKE FLINGING AROUND THE Y AXIS IN A ROTATION making the game unplayable RATHER Than grinding. But script 2 does change the rotation of the board on the x axis to match the angle of the ramp it is on. All I want is for script 1s controls and grinding and only the x axis to align with a ramp while it is being detected that it is on the ramp (Via tags and layers) but I have already tried and it doesn't work as stated, is there a different way to detect the ramp and change the x rotation only to align with it? Script 1: using UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header("Hover Settings")] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header("Movement Settings")] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header("UI & Scoring")] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header("Boost Settings")] public float boostMultiplier = 2.0f; [Header("Rotation Settings")] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header("Timer Settings")] public float timer = 5.0f; [Header("Miscellaneous")] public bool moving; public GameObject board; [Header("Input Settings")] public KeyCode moveForwardKey = 
KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header("Customizable Keys")] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float currentTurn; Rigidbody rb; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool isSteeringLeft; private bool isSteeringRight; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); trickManager = GetComponent<TrickManager>(); grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } void Start() { layerMask = 1 << LayerMask.NameToLayer("Characters"); layerMask = ~layerMask; if 
(scoreText == null) { GameObject canvas = GameObject.Find("Canvas"); if (canvas != null) { scoreText = Instantiate(Resources.Load("ScoreText", typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); } else { Debug.LogError("Canvas not found. Make sure there is a Canvas GameObject in the scene."); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); } void Update() { GetSteeringInput(); currentThrust = 0.0f; // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 scoreText.text = currentScore + " Points"; if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } Vector3 currentRotation = transform.rotation.eulerAngles; currentRotation.x = Mathf.Clamp(currentRotation.x, minRotation, maxRotation); currentRotation.z = Mathf.Clamp(currentRotation.z, minRotation, maxRotation); transform.rotation = Quaternion.Euler(currentRotation); float aclAxis = Input.GetAxis("Vertical"); if (aclAxis > 0.1f) currentThrust = aclAxis * forwardAcceleration; else if (aclAxis < -0.1f) currentThrust = aclAxis * backwardAcceleration; 
// Remove the following block of code related to currentTurn // if (Mathf.Abs(currentTurn) > 0.1f) // { // if (isSteeringLeft) // currentTurn = -turnStrength; // else if (isSteeringRight) // currentTurn = turnStrength; // } // currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Remove this line // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); ApplyGrinding(); } void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard 
script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } } Script 2: using UnityEngine; using UnityEngine.UI; [RequireComponent(typeof(Rigidbody))] public class HoverBoardControl : MonoBehaviour { private float boardslideSoundTime; private bool boardSlideCheck; private bool noseSlideCheck; private bool tailSlideCheck; public bool IsSteeringLeft { get { return isSteeringLeft; } } public bool IsSteeringRight { get { return isSteeringRight; } } [Header("Hover Settings")] public float hoverForce = 1500f; public float hoverHeight = 6f; public GameObject[] hoverPoints; [Header("Movement Settings")] public float forwardAcceleration = 100.0f; public float backwardAcceleration = 25.0f; [SerializeField] private float turnStrength = 10f; // Made it serializable for editor control [Header("UI & Scoring")] public Text scoreText; public int currentScore; public HUD hud; public TrickManager trickManager; [Header("Boost Settings")] public float boostMultiplier = 2.0f; [Header("Rotation Settings")] public float minRotation = -20.0f; public float maxRotation = 0.0f; [Header("Timer Settings")] public float timer = 5.0f; [Header("Miscellaneous")] public bool moving; public GameObject board; [Header("Input Settings")] public KeyCode moveForwardKey = KeyCode.W; public KeyCode moveBackwardKey = KeyCode.S; public KeyCode turnLeftKey = KeyCode.A; public KeyCode turnRightKey = KeyCode.D; [Header("Customizable Keys")] [SerializeField] private KeyCode ollieKey = KeyCode.Space; [SerializeField] private KeyCode boostKey = KeyCode.LeftControl; int layerMask; float currentThrust; float currentTurn; private GrindDetection grindDetection; // Reference to the GrindDetection script public bool IsGrounded { get; set; } public AudioSource GrindAudio { get; set; } public Vector3 BoardTilt { get; set; } private bool 
isSteeringLeft; private bool isSteeringRight; private Rigidbody rb; private bool isOnRamp; private Quaternion targetRampRotation; void Awake() { rb = GetComponent<Rigidbody>(); hud = FindObjectOfType<HUD>(); trickManager = GetComponent<TrickManager>(); grindDetection = GetComponentInChildren<GrindDetection>(); GrindAudio = GetComponent<AudioSource>(); isSteeringLeft = false; isSteeringRight = false; } // Add the following methods to match GrindDetection expectations public void GrindAudioVolumeLerp(float targetVolume, float lerpSpeed) { GrindAudio.volume = Mathf.Lerp(GrindAudio.volume, targetVolume, lerpSpeed * Time.deltaTime); } public void RotateAround(Vector3 point, Vector3 axis, float angle) { rb.transform.RotateAround(point, axis, angle * Time.deltaTime); } public void Rotate(Vector3 axis, float angle, Space relativeTo) { rb.transform.Rotate(axis * angle * Time.deltaTime, relativeTo); } public void TransformLocalRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.localRotation = Quaternion.Lerp(rb.transform.localRotation, targetRotation, lerpSpeed * Time.deltaTime); } public void TransformRotationLerp(Quaternion targetRotation, float lerpSpeed) { rb.transform.rotation = Quaternion.Lerp(rb.transform.rotation, targetRotation, lerpSpeed * Time.deltaTime); } // Add the following method to start the grind public void StartGrind() { grindDetection.StartGrind(); } // Add the following method to set the hover height public void SetHoverHeight(float height) { hoverHeight = height; } void Start() { layerMask = 1 << LayerMask.NameToLayer("Characters"); layerMask = ~layerMask; if (scoreText == null) { GameObject canvas = GameObject.Find("Canvas"); if (canvas != null) { scoreText = Instantiate(Resources.Load("ScoreText", typeof(Text))) as Text; scoreText.transform.SetParent(canvas.transform, false); } else { Debug.LogError("Canvas not found. 
Make sure there is a Canvas GameObject in the scene."); } } } void OnEnable() { rb.isKinematic = false; rb.useGravity = true; } void OnDisable() { rb.velocity = Vector3.zero; rb.angularVelocity = Vector3.zero; rb.isKinematic = true; } void GetSteeringInput() { isSteeringLeft = Input.GetKey(turnLeftKey); isSteeringRight = Input.GetKey(turnRightKey); float aclAxis = Input.GetAxis("Vertical"); currentThrust = aclAxis * ((aclAxis > 0) ? forwardAcceleration : backwardAcceleration); } void Update() { GetSteeringInput(); // Adjust currentTurn directly based on input if (isSteeringLeft) { currentTurn = -turnStrength; } else if (isSteeringRight) { currentTurn = turnStrength; } else { currentTurn = 0.0f; } // Allow adjustment of currentThrust from the Unity GUI inspector currentTurn = Mathf.Clamp(currentTurn, -1.0f, 1.0f); // Clamp the value between -1 and 1 if (Input.GetKeyDown(ollieKey)) { trickManager.ExecuteOllieOrTrick(1500f); } if (hud != null) { if (Input.GetKeyDown(boostKey) && (hud.BoostInt > 0)) { forwardAcceleration = forwardAcceleration * boostMultiplier; backwardAcceleration = backwardAcceleration * boostMultiplier; if (hud.BoostInt <= 0) { forwardAcceleration = forwardAcceleration / boostMultiplier; backwardAcceleration = backwardAcceleration / boostMultiplier; } } if (Input.GetKey(boostKey)) { hud.BoostInt -= 0.2f; } } // Check for grinding input if (Input.GetKeyDown(grindDetection.GrindKey)) { StartGrind(); } } void FixedUpdate() { ApplyHover(); ApplyMovement(); ApplyTurning(); // New code to align with ramp when on it if (grindDetection.IsGrinding() && isOnRamp) { // Align to the ramp’s angle AlignToRamp(); } ApplyGrinding(); } // New method to align the board with the ramp’s angle private void AlignToRamp() { rb.rotation = Quaternion.Slerp(rb.rotation, targetRampRotation, Time.deltaTime * 5f); // Note: ‘5f’ is a lerp speed factor that you can adjust as needed or expose as a public variable. 
} void ApplyHover() { for (int i = 0; i < hoverPoints.Length; i++) { var hoverPoint = hoverPoints[i]; RaycastHit hoverHit; if (Physics.Raycast(hoverPoint.transform.position, -Vector3.up, out hoverHit, hoverHeight, layerMask)) { rb.AddForceAtPosition(Vector3.up * hoverForce * (1.0f - (hoverHit.distance / hoverHeight)), hoverPoint.transform.position); } else { if (transform.position.y > hoverPoint.transform.position.y) { rb.AddForceAtPosition(hoverPoint.transform.up * hoverForce, hoverPoint.transform.position); } else { rb.AddForceAtPosition(hoverPoint.transform.up * -hoverForce, hoverPoint.transform.position); } } } } void ApplyMovement() { if (Mathf.Abs(currentThrust) > 0) { rb.AddForce(transform.forward * currentThrust); } } void ApplyTurning() { if (currentTurn != 0) { rb.AddRelativeTorque(Vector3.up * currentTurn * turnStrength); } } void ApplyGrinding() { if (grindDetection.IsGrinding()) { // Apply grinding logic here // Access grindDetection properties and methods as needed // Example: Reduce hover height and force during grinding // Adjust the hover height during grinding // Add your custom logic here based on the skateboard script if (boardSlideCheck || noseSlideCheck || tailSlideCheck) { boardslideSoundTime += Time.deltaTime; if (boardslideSoundTime > 0f && boardslideSoundTime < 0.1f && !GrindAudio.isPlaying) { GrindAudio.Play(); // Assuming GrindAudio is your AudioSource } } else { boardslideSoundTime = 0f; GrindAudio.Stop(); // Assuming GrindAudio is your AudioSource } // Add the rest of your custom logic from the skateboard script // Adjust rotations, movements, and other parameters as needed } else { // Reset hover height and force when not grinding // Reset the hover height when not grinding // Add the rest of your logic for when not grinding // Reset any adjustments made during grinding, if necessary } } }
ee33a8d8378f939ea9352a5d3dc79a7c
{ "intermediate": 0.32454419136047363, "beginner": 0.4718136787414551, "expert": 0.2036421149969101 }
37,687
how to mock this api : /validate-wallet request_body: { "msisdn": "1011111111", "nationalId": "32434232422432" } response : { "fullName": "عمر امجد عثمان ابوالنجا ", "storeName": "test", "msisdn": "1011111111", "nationalId": "32434232422432", "gender": "M", "address": "ش الاصبغ-حلمية الزيتون", "country":"Egypt", "birthDate": '2000-12-29', "nationality": null, "staffId": "2122121", "agentName": "hamda" }
91913925cab3c0728f8258ef5b0c8e97
{ "intermediate": 0.6729405522346497, "beginner": 0.19589745998382568, "expert": 0.13116194307804108 }
37,688
make a window of a program in python with three buttons. open image, below that next image next to previous image. below that add limit checkbox. below that show total img in label. below that show img count in a label and below that show img name in a label the open img will open a folder and list the files that fits img_EXTENSIONS = [‘.jpg’, ‘.jpeg’, ‘.png’, ‘.gif’, ‘.bmp’, ‘.webp’]. put it it file index list and number it. if i press the open folder again, and select a new folder it index the file of the new folder instead. if i didn't select a new folder, or press cancel, it uses the last used folder. if there's no image found, play a messagebox. add a history pool. the next image button will call the next img in the history pool or a random image number and add it to the end of the history pool. the previous image button will call the previous image number in the history pool. if it reaches the first image(top of the pool), it adds a random image number from the index to the top of the pool instead of stopping.
163b6495231d2ae5e9dafb1ae8c648e4
{ "intermediate": 0.3699950873851776, "beginner": 0.24772511422634125, "expert": 0.38227981328964233 }
37,689
Can you please refactor (x-y)/x
7a2d201e069b01c90636476b5f6db022
{ "intermediate": 0.3987853527069092, "beginner": 0.2995409667491913, "expert": 0.3016737103462219 }
37,690
{socialLinks.map((link, index) => { return ( <BoutonReseau key={index} href={link.reseau} type={getSocialNetworkType(link.reseau)} /> );Uncaught TypeError: socialLinks.map is not a function : import { useEffect, useState, useRef } from "react"; import { Link, useLocation } from "react-router-dom"; import BoutonReseau from "../../components/Artiste/BoutonReseau"; import { motion } from "framer-motion"; import axios from "axios"; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || "Description par défaut si aucune description n'est passée." 
); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get("id"); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<"top" | "bottom">( "top" ); function getSocialNetworkType(url: string): "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube" { if (url.includes("soundcloud")) { return "soundcloud"; } else if (url.includes("spotify")) { return "spotify"; } else if (url.includes("instagram")) { return "instagram"; } else if (url.includes("twitter")) { return "twitter"; } else if (url.includes("youtube")) { return "youtube"; } else { throw new Error(`Unsupported social network URL: ${url}`); } } useEffect(() => { axios .get("http://localhost:8080/getSocialLinks/" + idArtiste) .then((response) => { setSocialLinks(response.data); console.log(response.data); }) .catch((error) => { console.error( "Erreur lors de la récupération des liens de réseaux sociaux", error ); }); const handleResize = () => { setWindowWidth(window.innerWidth); }; window.addEventListener("resize", handleResize); return () => { window.removeEventListener("resize", handleResize); }; }, []); useEffect(() => { if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 2*3rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition("top"); } else { setInfosGridPosition("bottom"); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: "easeInOut", }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: "easeInOut", }, }, }; const overlayVariants = { initial: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)", transition: { duration: 0.1, ease: "easeInOut", }, }, extended: { background: "linear-gradient(to top, rgba(0, 0, 0, 
0.7) 40%, rgba(0, 0, 0, 0) 100%)", transition: { delay: 0.6, duration: 0.6, ease: "easeInOut", }, }, }; return ( <div id="PageArtiste"> <motion.div className="overlay" variants={overlayVariants} initial="initial" animate={infosGridPosition === "top" ? "extended" : "initial"} /> <img src={"http://localhost:8080/getImageArtiste/" + idArtiste} alt="image de fond" /> <div className="content" style={{ columnGap: infosGridPosition === "top" ? "0" : "5rem", rowGap: windowWidth > 991 ? "0" : "5rem", }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" ") .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className="infos" variants={infosVariants} initial="initial" animate="visible" exit="initial" style={{ gridArea: infosGridPosition }} > <p className="description">{description}</p> <div className="les-reseaux"> {socialLinks.map((link, index) => { return ( <BoutonReseau key={index} href={link.reseau} type={getSocialNetworkType(link.reseau)} /> ); })} {/* <BoutonReseau href="https://www.soundcloud.com/" type='soundcloud' /> <BoutonReseau href="https://www.spotify.com/" type='spotify'/> <BoutonReseau href="https://www.instagram.com/" type='instagram'/> <BoutonReseau href="https://www.twitter.com/" type='twitter'/> <BoutonReseau href="https://www.youtube.com/" type='youtube'/> */} </div> <Link to="/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className="btn-retour" > <svg width="36" height="28" viewBox="0 0 36 28" fill="none" xmlns="http://www.w3.org/2000/svg" > <rect x="6.52539" y="0.321533" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(45 6.52539 0.321533)" /> <rect x="3.87891" y="25.5957" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(-45 3.87891 25.5957)" /> </svg> </Link> </motion.div> <div className="date-heure"> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); } import { motion } from 
'framer-motion'; import {useState} from 'react' type Props = { href : string; type: "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube"; } export default function BoutonReseau(props: Props) { const [isHovered, setIsHovered] = useState(false) const btnVariants = { default:{ backgroundColor:"#E45A3B00", transition:{ duration: 0.3, ease:"easeOut", } }, hover:{ backgroundColor:"#E45A3B", transition:{ duration: 0.3, ease:"easeOut", } } } const svgVariants = { default:{ fill:"#E45A3B", transition:{ duration:0.3, ease: [1, 0, 0,1] } }, hover:{ fill:"#FFFBEE", transition:{ duration:0.3, ease: [1, 0, 0,1] } } } return ( <motion.a href={props.href} className='btn-reseau' onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} variants={btnVariants} initial="default" animate={isHovered ? "hover" : "default"} > { props.type === "soundcloud" ? ( <svg width="51" height="22" viewBox="0 0 51 22" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M24.0742 3.20787V21.779H44.0157C48.2271 21.4821 50.1166 18.878 50.1166 15.68C50.1166 12.2993 47.567 9.581 44.1523 9.581C43.2873 9.581 42.5588 9.76374 41.8076 10.0835C41.2612 4.73833 36.7312 0.558105 31.1539 0.558105C28.4905 0.558105 25.9637 1.56319 24.0742 3.20787ZM22.071 5.53783C21.388 5.12666 20.6596 4.80686 19.8628 4.64696V21.779H23.0271V4.28148C22.6856 4.64696 22.3669 5.10382 22.071 5.53783ZM16.7214 4.30432V21.779H18.8157V4.37285C18.3832 4.30432 17.9506 4.28148 17.4954 4.28148C17.2222 4.28148 16.9718 4.28148 16.7214 4.30432ZM12.5555 5.7891V21.779H14.6271V4.73833C13.8758 4.9896 13.1701 5.35509 12.5555 5.7891ZM8.75387 11.4998C8.61729 11.4998 8.4807 11.3627 8.32135 11.2942V21.779H10.4157V7.75358C9.57339 8.85003 9.00428 10.1521 8.75387 11.4998ZM4.10996 10.8602V21.5735C4.58801 21.7105 5.13435 21.779 5.72622 21.779H6.22704V10.6775C6.04492 10.6546 5.86281 10.6318 5.72622 10.6318C5.13435 10.6318 4.58801 10.7231 4.10996 10.8602ZM0.0351562 16.2054C0.0351562 17.9186 0.809142 19.4262 2.01565 
20.4542V11.9795C0.809142 12.9846 0.0351562 14.515 0.0351562 16.2054Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) : props.type === "spotify" ? ( <svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M31.8 17.8C25.4 14 14.7 13.6 8.6 15.5C7.6 15.8 6.6 15.2 6.3 14.3C6 13.3 6.6 12.3 7.5 12C14.6 9.9 26.3 10.3 33.7 14.7C34.6 15.2 34.9 16.4 34.4 17.3C33.9 18 32.7 18.3 31.8 17.8ZM31.6 23.4C31.1 24.1 30.2 24.4 29.5 23.9C24.1 20.6 15.9 19.6 9.6 21.6C8.8 21.8 7.9 21.4 7.7 20.6C7.5 19.8 7.9 18.9 8.7 18.7C16 16.5 25 17.6 31.2 21.4C31.8 21.7 32.1 22.7 31.6 23.4ZM29.2 28.9C28.8 29.5 28.1 29.7 27.5 29.3C22.8 26.4 16.9 25.8 9.9 27.4C9.2 27.6 8.6 27.1 8.4 26.5C8.2 25.8 8.7 25.2 9.3 25C16.9 23.3 23.5 24 28.7 27.2C29.4 27.5 29.5 28.3 29.2 28.9ZM20 0C17.3736 0 14.7728 0.517315 12.3463 1.52241C9.91982 2.5275 7.71504 4.00069 5.85786 5.85786C2.10714 9.60859 0 14.6957 0 20C0 25.3043 2.10714 30.3914 5.85786 34.1421C7.71504 35.9993 9.91982 37.4725 12.3463 38.4776C14.7728 39.4827 17.3736 40 20 40C25.3043 40 30.3914 37.8929 34.1421 34.1421C37.8929 30.3914 40 25.3043 40 20C40 17.3736 39.4827 14.7728 38.4776 12.3463C37.4725 9.91982 35.9993 7.71504 34.1421 5.85786C32.285 4.00069 30.0802 2.5275 27.6537 1.52241C25.2272 0.517315 22.6264 0 20 0Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) : props.type === "instagram" ? 
( <svg width="45" height="45" viewBox="0 0 45 45" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M13.0353 0H31.9141C39.106 0 44.9494 5.84342 44.9494 13.0353V31.9141C44.9494 35.3712 43.576 38.6868 41.1314 41.1314C38.6868 43.576 35.3712 44.9494 31.9141 44.9494H13.0353C5.84342 44.9494 0 39.106 0 31.9141V13.0353C0 9.57814 1.37336 6.26255 3.81796 3.81796C6.26255 1.37336 9.57814 0 13.0353 0ZM12.5858 4.49494C10.44 4.49494 8.38204 5.34737 6.8647 6.8647C5.34737 8.38204 4.49494 10.44 4.49494 12.5858V32.3635C4.49494 36.836 8.11336 40.4544 12.5858 40.4544H32.3635C34.5094 40.4544 36.5673 39.602 38.0847 38.0847C39.602 36.5673 40.4544 34.5094 40.4544 32.3635V12.5858C40.4544 8.11336 36.836 4.49494 32.3635 4.49494H12.5858ZM34.2739 7.86614C35.019 7.86614 35.7335 8.16212 36.2604 8.68898C36.7873 9.21583 37.0832 9.93039 37.0832 10.6755C37.0832 11.4206 36.7873 12.1351 36.2604 12.662C35.7335 13.1888 35.019 13.4848 34.2739 13.4848C33.5288 13.4848 32.8142 13.1888 32.2874 12.662C31.7605 12.1351 31.4646 11.4206 31.4646 10.6755C31.4646 9.93039 31.7605 9.21583 32.2874 8.68898C32.8142 8.16212 33.5288 7.86614 34.2739 7.86614ZM22.4747 11.2373C25.455 11.2373 28.3133 12.4213 30.4207 14.5287C32.5281 16.6361 33.712 19.4944 33.712 22.4747C33.712 25.455 32.5281 28.3133 30.4207 30.4207C28.3133 32.5281 25.455 33.712 22.4747 33.712C19.4944 33.712 16.6361 32.5281 14.5287 30.4207C12.4213 28.3133 11.2373 25.455 11.2373 22.4747C11.2373 19.4944 12.4213 16.6361 14.5287 14.5287C16.6361 12.4213 19.4944 11.2373 22.4747 11.2373ZM22.4747 15.7323C20.6865 15.7323 18.9715 16.4426 17.7071 17.7071C16.4426 18.9715 15.7323 20.6865 15.7323 22.4747C15.7323 24.2629 16.4426 25.9778 17.7071 27.2423C18.9715 28.5067 20.6865 29.2171 22.4747 29.2171C24.2629 29.2171 25.9778 28.5067 27.2423 27.2423C28.5067 25.9778 29.2171 24.2629 29.2171 22.4747C29.2171 20.6865 28.5067 18.9715 27.2423 17.7071C25.9778 16.4426 24.2629 15.7323 22.4747 15.7323Z" variants={svgVariants} initial="default" animate={isHovered ? 
"hover" : "default"} /> </svg> ) : props.type === "twitter" ? ( <svg width="49" height="39" viewBox="0 0 49 39" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M49 4.58824C47.1965 5.39118 45.2524 5.91882 43.238 6.17118C45.2992 4.95529 46.892 3.02824 47.6415 0.711176C45.6974 1.85824 43.5425 2.66118 41.2706 3.12C39.4202 1.14706 36.8203 0 33.869 0C28.3647 0 23.8676 4.40471 23.8676 9.84176C23.8676 10.6218 23.9613 11.3788 24.1252 12.09C15.7868 11.6771 8.36185 7.75412 3.41969 1.81235C2.55306 3.25765 2.06119 4.95529 2.06119 6.74471C2.06119 10.1629 3.81788 13.1912 6.53489 14.9118C4.87189 14.9118 3.326 14.4529 1.9675 13.7647V13.8335C1.9675 18.6053 5.43403 22.5971 10.0249 23.4918C8.55094 23.8868 7.00357 23.9418 5.5043 23.6524C6.14047 25.608 7.38639 27.3193 9.06691 28.5455C10.7474 29.7718 12.7781 30.4514 14.8733 30.4888C11.3216 33.2428 6.91902 34.7314 2.3891 34.71C1.59273 34.71 0.796367 34.6641 0 34.5724C4.45029 37.3712 9.74379 39 15.412 39C33.869 39 44.011 23.9965 44.011 10.9888C44.011 10.5529 44.011 10.14 43.9876 9.70412C45.9551 8.32765 47.6415 6.58412 49 4.58824Z" variants={svgVariants} initial="default" animate={isHovered ? 
"hover" : "default"} /> </svg> ) : ( <svg width="53" height="37" viewBox="0 0 53 37" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M52.9878 13.04C53.1069 9.6163 52.3532 6.21826 50.7969 3.16135C49.741 1.90703 48.2755 1.06056 46.6559 0.769439C39.9569 0.165541 33.2301 -0.0819771 26.5045 0.0279454C19.8034 -0.0869662 13.1009 0.152565 6.42537 0.74552C5.10558 0.984034 3.88422 1.59906 2.91031 2.51554C0.743496 4.50083 0.502739 7.89735 0.261982 10.7676C-0.0873274 15.9284 -0.0873274 21.1065 0.261982 26.2673C0.331634 27.8828 0.573748 29.4864 0.984253 31.0511C1.27455 32.2592 1.86187 33.3769 2.69363 34.3041C3.67416 35.2691 4.92397 35.9191 6.28091 36.1698C11.4714 36.8063 16.7014 37.0701 21.9301 36.9591C30.3566 37.0787 37.7479 36.9591 46.4873 36.2894C47.8776 36.0541 49.1626 35.4033 50.1709 34.4237C50.845 33.7537 51.3484 32.9338 51.6396 32.0318C52.5006 29.4069 52.9235 26.6598 52.8915 23.8993C52.9878 22.5598 52.9878 14.4751 52.9878 13.04ZM21.0634 25.3344V10.5285L35.3162 17.9673C31.3197 20.1679 26.0471 22.6555 21.0634 25.3344Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) } </motion.a> ) }
212303cb8e1886c2261b215c677767d5
{ "intermediate": 0.36608830094337463, "beginner": 0.45848366618156433, "expert": 0.17542800307273865 }
37,691
import os from tkinter import Tk, Button, Label, filedialog, messagebox, IntVar, Checkbutton, Spinbox # Assuming Image importing from PIL is already done (if needed) class ImageBrowser: def __init__(self, master): self.master = master self.master.title("Image Browser") # Initialize variables self.img_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp'] self.file_index = [] self.current_index = 0 self.image_folder = "" self.limit_reached = False # Create widgets self.button_open = Button(master, text="Open Image Folder", command=self.open_image_folder) self.button_open.pack() self.button_previous = Button(master, text="Previous Image", command=self.previous_image) self.button_previous.pack(side='left') self.button_next = Button(master, text="Next Image", command=self.next_image) self.button_next.pack(side='right') self.limit_var = IntVar() self.limit_checkbox = Checkbutton(master, text="Limit", variable=self.limit_var) self.limit_checkbox.pack() self.limit_spinbox = Spinbox(master, from_=1, to=100, width=5) self.limit_spinbox.pack() # Image info labels self.label_total_img = Label(master, text="Total Images: 0") self.label_total_img.pack() self.label_img_count = Label(master, text="Image Count: 0") self.label_img_count.pack() self.label_img_name = Label(master, text="Image Name: -") self.label_img_name.pack() self.update_buttons_state() def update_buttons_state(self): state = 'normal' if self.file_index else 'disabled' self.button_next.config(state=state) self.button_previous.config(state=state) def open_image_folder(self): folder_path = filedialog.askdirectory() if folder_path: self.image_folder = folder_path self.file_index = [ os.path.join(self.image_folder, f) for f in os.listdir(self.image_folder) if os.path.splitext(f)[1].lower() in self.img_EXTENSIONS ] self.file_index.sort() if not self.file_index: messagebox.showinfo("No Images", "No images found in the selected folder.") return self.current_index = 0 self.limit_reached = False 
self.label_total_img.config(text=f"Total Images: {len(self.file_index)}") self.update_image_info() self.update_buttons_state() elif not self.file_index: messagebox.showinfo("No Folder", "Please select a folder to start browsing images.") def next_image(self): if not self.file_index: messagebox.showinfo("No Images", "Please open an image folder first.") return next_count = (self.current_index + 1) % len(self.file_index) if self.limit_var.get(): limit = int(self.limit_spinbox.get()) if self.limit_reached: self.limit_reached = False self.current_index = next_count elif next_count % limit == 0: # Limit is reached self.label_img_name.config(text="End of limit") self.limit_reached = True self.label_img_count.config(text=f"Image Count: {next_count}") return self.current_index = next_count self.update_image_info() def previous_image(self): if not self.file_index: messagebox.showinfo("No Images", "Please open an image folder first.") return self.limit_reached = False # View the previous image regardless of limit self.current_index = (self.current_index - 1) % len(self.file_index) self.update_image_info() def update_image_info(self): current_img_path = self.file_index[self.current_index] img_name = os.path.basename(current_img_path) self.label_img_name.config(text=f"Image Name: {img_name}") self.label_img_count.config(text=f"Image Count: {self.current_index + 1}") # You would add logic here to actually display the image, if desired if __name__ == "__main__": root = Tk() image_browser = ImageBrowser(root) root.mainloop() instead of calling the next iamge number in the index, make it add a random next image name to add to the pool
4077e10eeccdd1d0355dd9f10940e43d
{ "intermediate": 0.3628730773925781, "beginner": 0.4523906111717224, "expert": 0.18473637104034424 }
37,692
c++ insert in unordered_map if not exists
1b29f6fba29a6293150e963fb9fba917
{ "intermediate": 0.32572054862976074, "beginner": 0.3487844169139862, "expert": 0.3254949748516083 }
37,693
How do I start a jetty WebsocketListener instance from another thread
961c100855de4b7553071b355b2cb3e0
{ "intermediate": 0.5845158696174622, "beginner": 0.14953970909118652, "expert": 0.2659444212913513 }
37,694
c++ binary search element in array
5d7753003299828c8270fb4bd4fb1a04
{ "intermediate": 0.3203500509262085, "beginner": 0.3143056333065033, "expert": 0.3653442859649658 }
37,695
Следующая программа: include #include <vector> #include <istream> #include <fstream> #include <chrono> struct Date { int day; int month; int year; Date(int d, int m, int y) : day(d), month(m), year(y) {} bool operator>(const Date& other) const { if (year > other.year) { return true; } else if (year == other.year && month > other.month) { return true; } else if (year == other.year && month == other.month && day > other.day) { return true; } return false; } bool operator<(const Date& other) const { return !(operator>(other) || operator==(other)); } bool operator==(const Date& other) const { return (year == other.year && month == other.month && day == other.day); } }; struct GroupNumber { char letter; int number; GroupNumber(char l, int n) : letter(l), number(n) {} bool operator>(const GroupNumber& other) const { if (letter > other.letter) { return true; } else if (letter == other.letter && number > other.number) { return true; } return false; } bool operator<(const GroupNumber& other) const { return !(operator>(other) || operator==(other)); } bool operator==(const GroupNumber& other) const { return (letter == other.letter && number == other.number); } }; struct Fio { std::string surname; std::string name; std::string patronymic; Fio(std::string s, std::string n, std::string p) : surname(s), name(n), patronymic§ {} bool operator>(const Fio& other) const { if (surname > other.surname) { return true; } else if (surname == other.surname && name > other.name) { return true; } else if (surname == other.surname && name == other.name && patronymic > other.patronymic) { return true; } return false; } bool operator<(const Fio& other) const { return !(operator>(other) || operator==(other)); } bool operator==(const Fio& other) const { return (surname == other.surname && name == other.name && patronymic == other.patronymic); } }; struct Sorts { Fio fios; GroupNumber groupNumbers; int n; Sorts(const Fio& f, const GroupNumber& g, int num) : fios(f), groupNumbers(g), n(num) {} }; 
std::vector<Sorts> readFromFile(const std::string& filename) { std::vector<Sorts> sorts; std::ifstream file(filename); int n = 0; if (file.is_open()) { std::string surname, name, patronymic; char letter; int number; while (file >> surname >> name >> patronymic >> letter >> number) { Fio fio(surname, name, patronymic); GroupNumber groupNumber(letter, number); sorts.push_back(Sorts(fio, groupNumber, n)); n += 1; } file.close(); } else { std::cerr << “Enable open file” << filename << std::endl; } return sorts; } int partOfQuickSort(std::vector<Sorts>& sorts, int start, int end) { int left = start; for (int i = start; i < end; i++) { if (sorts[i].fios < sorts[end].fios || sorts[i].fios == sorts[end].fios || sorts[i].groupNumbers < sorts[end].groupNumbers || sorts[i].groupNumbers == sorts[end].groupNumbers) { Sorts temp = sorts[left]; sorts[left] = sorts[i]; sorts[i] = temp; left++; } } Sorts temp = sorts[left]; sorts[left] = sorts[end]; sorts[end] = temp; return left; } void quickSort(std::vector<Sorts>& sorts, int start, int end) { if (start >= end) return; int rightStart = partOfQuickSort(sorts, start, end); quickSort(sorts, start, rightStart); quickSort(sorts, rightStart + 1, end); } void quickSort(std::vector<Sorts>& sorts) { quickSort(sorts, 0, sorts.size()); } double quickSortWithTime(std::vector<Sorts>& sorts) { double timeQuick; auto start = std::chrono::high_resolution_clock::now(); quickSort(sorts); auto end = std::chrono::high_resolution_clock::now(); std::chrono::duration<double> duration = end - start; timeQuick = duration.count(); return timeQuick; } void writeFileQuickSort(std::string readFile, std::string file) { std::vector <Sorts> sorts = readFromFile(readFile); std::ofstream out(file); double timeQuick = quickSortWithTime(sorts); for (int i = 0; i < sorts.size(); i++) { out << sorts[i].fios.surname << " " << sorts[i].fios.name << " " << sorts[i].fios.patronymic << " " << sorts[i].groupNumbers.letter << sorts[i].groupNumbers.number << std::endl; } out 
<< “Time taken for QuickSort: " << timeQuick << " seconds” << std::endl; } int main() { writeFileQuickSort(“Shortr.txt”, “QuickSort.txt”); } крашится на строке if (sorts[i].fios < sorts[end].fios || sorts[i].fios == sorts[end].fios || sorts[i].groupNumbers < sorts[end].groupNumbers || sorts[i].groupNumbers == sorts[end].groupNumbers). Почему?
d04515fa026180af2fb2a032b8455ed3
{ "intermediate": 0.27891603112220764, "beginner": 0.334227979183197, "expert": 0.38685593008995056 }
37,696
how can I improve this function and can we ad a jsdoc for it: const getUsage = async (req, res) => { try { const { user, company, plan, subscription } = req const { ValidationConsumption } = req.app.models const team = user.teams[0] let where = { companyId: company.id, subscriptionId: subscription.id, isLive: false, deletedAt: null, } const privilege = getUserPrivilege(team.TeamUsers.privilegeLevel - 1) if (privilege.level <= 2) { where.teamId = team.id } const usedValidations = await ValidationConsumption.findOne({ attributes: [ [Sequelize.literal(`COUNT(DISTINCT "validationId")`), 'distinctCount'], ], where, raw: true, }) let usage = { usedAmount: usedValidations.distinctCount, totalAmount: privilege.level > 2 ? plan.validationCount : team.alocatedValidations, userCount: subscription.userCount, startDate: subscription.startDate, endDate: subscription.endDate, } return res.status(200).json({ usage }) } catch (error) { console.error('Error fetching me:', error) res.status(500).json({ message: 'Internal Server Error' }) } }
979cc0e0e36ee6502e49587a1c3e5b74
{ "intermediate": 0.43433859944343567, "beginner": 0.417288601398468, "expert": 0.1483728140592575 }
37,697
Как установить Docker compouse на Windows
088b8a633ed844428a19aca65c2218c3
{ "intermediate": 0.35907021164894104, "beginner": 0.3008335828781128, "expert": 0.34009623527526855 }
37,698
fait en sorte que rien ne s'affiche si l'url du bouton réseaux n'est pas prit en compte, car actuellement ça fait crash la page : Uncaught Error: Unsupported social network URL: Réseau 1import { useEffect, useState, useRef } from "react"; import { Link, useLocation } from "react-router-dom"; import BoutonReseau from "../../components/Artiste/BoutonReseau"; import { motion } from "framer-motion"; import axios from "axios"; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || "Description par défaut si aucune description n'est passée." ); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get("id"); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<"top" | "bottom">( "top" ); function getSocialNetworkType( url: string ): "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube" { if (url.includes("soundcloud")) { return "soundcloud"; } else if (url.includes("spotify")) { return "spotify"; } else if (url.includes("instagram")) { return "instagram"; } else if (url.includes("twitter")) { return "twitter"; } else if (url.includes("youtube")) { return "youtube"; } else { throw new Error(`Unsupported social network URL: ${url}`); } } useEffect(() => { axios .get("http://localhost:8080/getSocialLinks/" + idArtiste) .then((response) => { setSocialLinks(response.data); }) .catch((error) => { console.error( "Erreur lors de la récupération des 
liens de réseaux sociaux", error ); }); const handleResize = () => { setWindowWidth(window.innerWidth); }; window.addEventListener("resize", handleResize); return () => { window.removeEventListener("resize", handleResize); }; }, []); useEffect(() => { if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 2*3rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition("top"); } else { setInfosGridPosition("bottom"); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: "easeInOut", }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: "easeInOut", }, }, }; const overlayVariants = { initial: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)", transition: { duration: 0.1, ease: "easeInOut", }, }, extended: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 40%, rgba(0, 0, 0, 0) 100%)", transition: { delay: 0.6, duration: 0.6, ease: "easeInOut", }, }, }; return ( <div id="PageArtiste"> <motion.div className="overlay" variants={overlayVariants} initial="initial" animate={infosGridPosition === "top" ? "extended" : "initial"} /> <img src={"http://localhost:8080/getImageArtiste/" + idArtiste} alt="image de fond" /> <div className="content" style={{ columnGap: infosGridPosition === "top" ? "0" : "5rem", rowGap: windowWidth > 991 ? 
"0" : "5rem", }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" ") .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className="infos" variants={infosVariants} initial="initial" animate="visible" exit="initial" style={{ gridArea: infosGridPosition }} > <p className="description">{description}</p> <div className="les-reseaux"> {socialLinks.map((link, index) => { const type = getSocialNetworkType(link.reseau); console.log(type); return ( <BoutonReseau key={index} href={link.reseau} type={type} /> ); })} {/* <BoutonReseau href="https://www.soundcloud.com/" type='soundcloud' /> <BoutonReseau href="https://www.spotify.com/" type='spotify'/> <BoutonReseau href="https://www.instagram.com/" type='instagram'/> <BoutonReseau href="https://www.twitter.com/" type='twitter'/> <BoutonReseau href="https://www.youtube.com/" type='youtube'/> */} </div> <Link to="/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className="btn-retour" > <svg width="36" height="28" viewBox="0 0 36 28" fill="none" xmlns="http://www.w3.org/2000/svg" > <rect x="6.52539" y="0.321533" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(45 6.52539 0.321533)" /> <rect x="3.87891" y="25.5957" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(-45 3.87891 25.5957)" /> </svg> </Link> </motion.div> <div className="date-heure"> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); } import { motion } from 'framer-motion'; import {useState} from 'react' type Props = { href : string; type: "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube"; } export default function BoutonReseau(props: Props) { const [isHovered, setIsHovered] = useState(false) const btnVariants = { default:{ backgroundColor:"#E45A3B00", transition:{ duration: 0.3, ease:"easeOut", } }, hover:{ backgroundColor:"#E45A3B", transition:{ duration: 0.3, ease:"easeOut", } } } const 
svgVariants = { default:{ fill:"#E45A3B", transition:{ duration:0.3, ease: [1, 0, 0,1] } }, hover:{ fill:"#FFFBEE", transition:{ duration:0.3, ease: [1, 0, 0,1] } } } return ( <motion.a href={props.href} className='btn-reseau' onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} variants={btnVariants} initial="default" animate={isHovered ? "hover" : "default"} > { props.type === "soundcloud" ? ( <svg width="51" height="22" viewBox="0 0 51 22" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M24.0742 3.20787V21.779H44.0157C48.2271 21.4821 50.1166 18.878 50.1166 15.68C50.1166 12.2993 47.567 9.581 44.1523 9.581C43.2873 9.581 42.5588 9.76374 41.8076 10.0835C41.2612 4.73833 36.7312 0.558105 31.1539 0.558105C28.4905 0.558105 25.9637 1.56319 24.0742 3.20787ZM22.071 5.53783C21.388 5.12666 20.6596 4.80686 19.8628 4.64696V21.779H23.0271V4.28148C22.6856 4.64696 22.3669 5.10382 22.071 5.53783ZM16.7214 4.30432V21.779H18.8157V4.37285C18.3832 4.30432 17.9506 4.28148 17.4954 4.28148C17.2222 4.28148 16.9718 4.28148 16.7214 4.30432ZM12.5555 5.7891V21.779H14.6271V4.73833C13.8758 4.9896 13.1701 5.35509 12.5555 5.7891ZM8.75387 11.4998C8.61729 11.4998 8.4807 11.3627 8.32135 11.2942V21.779H10.4157V7.75358C9.57339 8.85003 9.00428 10.1521 8.75387 11.4998ZM4.10996 10.8602V21.5735C4.58801 21.7105 5.13435 21.779 5.72622 21.779H6.22704V10.6775C6.04492 10.6546 5.86281 10.6318 5.72622 10.6318C5.13435 10.6318 4.58801 10.7231 4.10996 10.8602ZM0.0351562 16.2054C0.0351562 17.9186 0.809142 19.4262 2.01565 20.4542V11.9795C0.809142 12.9846 0.0351562 14.515 0.0351562 16.2054Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) : props.type === "spotify" ? 
( <svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M31.8 17.8C25.4 14 14.7 13.6 8.6 15.5C7.6 15.8 6.6 15.2 6.3 14.3C6 13.3 6.6 12.3 7.5 12C14.6 9.9 26.3 10.3 33.7 14.7C34.6 15.2 34.9 16.4 34.4 17.3C33.9 18 32.7 18.3 31.8 17.8ZM31.6 23.4C31.1 24.1 30.2 24.4 29.5 23.9C24.1 20.6 15.9 19.6 9.6 21.6C8.8 21.8 7.9 21.4 7.7 20.6C7.5 19.8 7.9 18.9 8.7 18.7C16 16.5 25 17.6 31.2 21.4C31.8 21.7 32.1 22.7 31.6 23.4ZM29.2 28.9C28.8 29.5 28.1 29.7 27.5 29.3C22.8 26.4 16.9 25.8 9.9 27.4C9.2 27.6 8.6 27.1 8.4 26.5C8.2 25.8 8.7 25.2 9.3 25C16.9 23.3 23.5 24 28.7 27.2C29.4 27.5 29.5 28.3 29.2 28.9ZM20 0C17.3736 0 14.7728 0.517315 12.3463 1.52241C9.91982 2.5275 7.71504 4.00069 5.85786 5.85786C2.10714 9.60859 0 14.6957 0 20C0 25.3043 2.10714 30.3914 5.85786 34.1421C7.71504 35.9993 9.91982 37.4725 12.3463 38.4776C14.7728 39.4827 17.3736 40 20 40C25.3043 40 30.3914 37.8929 34.1421 34.1421C37.8929 30.3914 40 25.3043 40 20C40 17.3736 39.4827 14.7728 38.4776 12.3463C37.4725 9.91982 35.9993 7.71504 34.1421 5.85786C32.285 4.00069 30.0802 2.5275 27.6537 1.52241C25.2272 0.517315 22.6264 0 20 0Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) : props.type === "instagram" ? 
( <svg width="45" height="45" viewBox="0 0 45 45" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M13.0353 0H31.9141C39.106 0 44.9494 5.84342 44.9494 13.0353V31.9141C44.9494 35.3712 43.576 38.6868 41.1314 41.1314C38.6868 43.576 35.3712 44.9494 31.9141 44.9494H13.0353C5.84342 44.9494 0 39.106 0 31.9141V13.0353C0 9.57814 1.37336 6.26255 3.81796 3.81796C6.26255 1.37336 9.57814 0 13.0353 0ZM12.5858 4.49494C10.44 4.49494 8.38204 5.34737 6.8647 6.8647C5.34737 8.38204 4.49494 10.44 4.49494 12.5858V32.3635C4.49494 36.836 8.11336 40.4544 12.5858 40.4544H32.3635C34.5094 40.4544 36.5673 39.602 38.0847 38.0847C39.602 36.5673 40.4544 34.5094 40.4544 32.3635V12.5858C40.4544 8.11336 36.836 4.49494 32.3635 4.49494H12.5858ZM34.2739 7.86614C35.019 7.86614 35.7335 8.16212 36.2604 8.68898C36.7873 9.21583 37.0832 9.93039 37.0832 10.6755C37.0832 11.4206 36.7873 12.1351 36.2604 12.662C35.7335 13.1888 35.019 13.4848 34.2739 13.4848C33.5288 13.4848 32.8142 13.1888 32.2874 12.662C31.7605 12.1351 31.4646 11.4206 31.4646 10.6755C31.4646 9.93039 31.7605 9.21583 32.2874 8.68898C32.8142 8.16212 33.5288 7.86614 34.2739 7.86614ZM22.4747 11.2373C25.455 11.2373 28.3133 12.4213 30.4207 14.5287C32.5281 16.6361 33.712 19.4944 33.712 22.4747C33.712 25.455 32.5281 28.3133 30.4207 30.4207C28.3133 32.5281 25.455 33.712 22.4747 33.712C19.4944 33.712 16.6361 32.5281 14.5287 30.4207C12.4213 28.3133 11.2373 25.455 11.2373 22.4747C11.2373 19.4944 12.4213 16.6361 14.5287 14.5287C16.6361 12.4213 19.4944 11.2373 22.4747 11.2373ZM22.4747 15.7323C20.6865 15.7323 18.9715 16.4426 17.7071 17.7071C16.4426 18.9715 15.7323 20.6865 15.7323 22.4747C15.7323 24.2629 16.4426 25.9778 17.7071 27.2423C18.9715 28.5067 20.6865 29.2171 22.4747 29.2171C24.2629 29.2171 25.9778 28.5067 27.2423 27.2423C28.5067 25.9778 29.2171 24.2629 29.2171 22.4747C29.2171 20.6865 28.5067 18.9715 27.2423 17.7071C25.9778 16.4426 24.2629 15.7323 22.4747 15.7323Z" variants={svgVariants} initial="default" animate={isHovered ? 
"hover" : "default"} /> </svg> ) : props.type === "twitter" ? ( <svg width="49" height="39" viewBox="0 0 49 39" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M49 4.58824C47.1965 5.39118 45.2524 5.91882 43.238 6.17118C45.2992 4.95529 46.892 3.02824 47.6415 0.711176C45.6974 1.85824 43.5425 2.66118 41.2706 3.12C39.4202 1.14706 36.8203 0 33.869 0C28.3647 0 23.8676 4.40471 23.8676 9.84176C23.8676 10.6218 23.9613 11.3788 24.1252 12.09C15.7868 11.6771 8.36185 7.75412 3.41969 1.81235C2.55306 3.25765 2.06119 4.95529 2.06119 6.74471C2.06119 10.1629 3.81788 13.1912 6.53489 14.9118C4.87189 14.9118 3.326 14.4529 1.9675 13.7647V13.8335C1.9675 18.6053 5.43403 22.5971 10.0249 23.4918C8.55094 23.8868 7.00357 23.9418 5.5043 23.6524C6.14047 25.608 7.38639 27.3193 9.06691 28.5455C10.7474 29.7718 12.7781 30.4514 14.8733 30.4888C11.3216 33.2428 6.91902 34.7314 2.3891 34.71C1.59273 34.71 0.796367 34.6641 0 34.5724C4.45029 37.3712 9.74379 39 15.412 39C33.869 39 44.011 23.9965 44.011 10.9888C44.011 10.5529 44.011 10.14 43.9876 9.70412C45.9551 8.32765 47.6415 6.58412 49 4.58824Z" variants={svgVariants} initial="default" animate={isHovered ? 
"hover" : "default"} /> </svg> ) : ( <svg width="53" height="37" viewBox="0 0 53 37" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M52.9878 13.04C53.1069 9.6163 52.3532 6.21826 50.7969 3.16135C49.741 1.90703 48.2755 1.06056 46.6559 0.769439C39.9569 0.165541 33.2301 -0.0819771 26.5045 0.0279454C19.8034 -0.0869662 13.1009 0.152565 6.42537 0.74552C5.10558 0.984034 3.88422 1.59906 2.91031 2.51554C0.743496 4.50083 0.502739 7.89735 0.261982 10.7676C-0.0873274 15.9284 -0.0873274 21.1065 0.261982 26.2673C0.331634 27.8828 0.573748 29.4864 0.984253 31.0511C1.27455 32.2592 1.86187 33.3769 2.69363 34.3041C3.67416 35.2691 4.92397 35.9191 6.28091 36.1698C11.4714 36.8063 16.7014 37.0701 21.9301 36.9591C30.3566 37.0787 37.7479 36.9591 46.4873 36.2894C47.8776 36.0541 49.1626 35.4033 50.1709 34.4237C50.845 33.7537 51.3484 32.9338 51.6396 32.0318C52.5006 29.4069 52.9235 26.6598 52.8915 23.8993C52.9878 22.5598 52.9878 14.4751 52.9878 13.04ZM21.0634 25.3344V10.5285L35.3162 17.9673C31.3197 20.1679 26.0471 22.6555 21.0634 25.3344Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) } </motion.a> ) }
747fd214057c37b4bbf1c27fc09a8f7c
{ "intermediate": 0.33618292212486267, "beginner": 0.3373759984970093, "expert": 0.3264411687850952 }
37,699
write detail including every step for reinforcement learning for continuous space and continuous action using Deep Deterministic Policy Gradients in Tensorflow and how to train it and how to save trained model , how to use once trained
5432335388d2e1367cbb6b96d56be7a7
{ "intermediate": 0.11996881663799286, "beginner": 0.02082338184118271, "expert": 0.859207808971405 }
37,700
Read this text and understand its meaning to get ready for coding it: The MoE-Mamba model introduces several key changes to the existing Transformer architecture: 1. **Selection Mechanism**: Unlike Transformers, which compress the entire input context into a hidden state and lose information needed to effectively utilize the context, Mamba introduces a selection mechanism that is input-dependent. This means the model can adjust its output at each step as the input changes. This is achieved by making a few of the state space variables input-dependent instead of input-invariant, allowing the model to project the input onto the state space variable at each time step [Source 2]. 2. **Computational Efficiency**: The introduction of the selection mechanism creates a computational overhead issue. To address this, Mamba designs a machine architecture-aware algorithm that takes full advantage of modern GPU memory architecture, avoiding unnecessary movement of data in and out of high bandwidth memory (HBM) as much as possible [Source 2]. 3. **Mixture of Experts (MoE) Layer**: The MoE-Mamba model integrates a Mixture of Experts (MoE) layer, a method for distributing a task among multiple sub-models, or "experts". Each expert handles a different part of the data, enabling the model to process complex sequences of data very efficiently [Source 0]. 4. **State Space Models**: Mamba is based on the concept of state space models, which are good at compressing the input context but lose information needed to effectively utilize the context. The Mamba model addresses this issue by creating a selection mechanism that is input-dependent, allowing the model to adjust its output at each step as the input changes [Source 2]. Mathematically, the addition of the selection mechanism in Mamba can be represented as follows: Let's denote the state space variables as `x` and the input as `u`. 
In a typical Transformer model, the state space variables are updated based on the input as follows: `dx/dt = f(x)` where `f(x)` is a function that depends on the state space variables `x`. However, in Mamba, the update rule is modified to be input-dependent: `dx/dt = g(x, u)` where `g(x, u)` is a function that depends on the state space variables `x` and the input `u`. This modification allows the model to adjust its output at each step as the input changes, leading to improved computational efficiency [Source 2].
f2d0f42f039cc732dd42ed2cef2ee2e9
{ "intermediate": 0.27063772082328796, "beginner": 0.2500167489051819, "expert": 0.47934553027153015 }
37,701
Context constant: here is the code we're working with : import torch import torch.nn as nn import torch.nn.functional as F from torchtext.data.utils import get_tokenizer from torchtext.vocab import build_vocab_from_iterator import numpy as np import matplotlib.pyplot as plt from tqdm import tqdm # Define the Expert module class Expert(nn.Module): def __init__(self, input_dim, hidden_dim): super(Expert, self).__init__() self.fc1 = nn.Linear(input_dim, hidden_dim) self.activation = nn.ReLU() self.fc2 = nn.Linear(hidden_dim, input_dim) def forward(self, x): x = self.activation(self.fc1(x)) x = self.fc2(x) return x # Define the MoELayer module class MoELayer(nn.Module): def __init__(self, num_experts, input_dim, hidden_dim): super(MoELayer, self).__init__() self.experts = nn.ModuleList([Expert(input_dim, hidden_dim) for _ in range(num_experts)]) self.gate = nn.Linear(input_dim, num_experts) def forward(self, x): gating_distribution = F.softmax(self.gate(x), dim=1) expert_outputs = torch.stack([expert(x) for expert in self.experts], dim=1) output = torch.einsum('bi,bij->bj', gating_distribution, expert_outputs) # This efficiently computes the weighted sum of expert outputs return output # Define the SelectionMechanism module class SelectionMechanism(nn.Module): def __init__(self, input_dim, state_dim): super(SelectionMechanism, self).__init__() self.selection_layer = nn.Linear(input_dim + state_dim, state_dim) def forward(self, state, u): # Ensure both state and u have a batch dimension (state already has it) combined = torch.cat((state, u), dim=1) new_state = self.selection_layer(combined) return new_state # Define the StateSpaceMamba module class StateSpaceMamba(nn.Module): def __init__(self, input_dim, state_dim, num_experts, hidden_dim): super(StateSpaceMamba, self).__init__() self.state_dim = state_dim self.state = nn.Parameter(torch.zeros(1, state_dim)) self.input_layer = nn.Linear(input_dim, state_dim) self.selection_mechanism = SelectionMechanism(input_dim, 
state_dim) self.moe_layer = MoELayer(num_experts, state_dim, hidden_dim) def forward(self, input_sequence): outputs = [] state = self.state for u in input_sequence.split(1, dim=0): # Assuming input_sequence is shape (sequence_length, input_dim) u = u.squeeze(0) # Remove the sequence dimension state = self.selection_mechanism(state, u.unsqueeze(0)) # Add batch dimension to u u_projected = self.input_layer(u) output = self.moe_layer(u_projected + state) outputs.append(output) return torch.stack(outputs, dim=0) # Reshape to (sequence_length, input_dim) # Load the dataset print("Loading dataset...") with open("C:/Users/Dell-PC/Desktop/The-Physics-Dataset.txt", "r") as f: text = f.read() # Tokenize the text print("Tokenizing text...") tokenizer = get_tokenizer("basic_english") tokens = tokenizer(text.lower()) # Build the vocabulary print("Building vocabulary...") vocab = build_vocab_from_iterator([tokens], specials=["<unk>", "<pad>", "<sos>", "<eos>"]) vocab.set_default_index(vocab["<unk>"]) numericalized_tokens = [vocab["<sos>"]] + [vocab[token] for token in tokens] + [vocab["<eos>"]] # Prepare the input sequence batch_size = 24 seq_length = 12 vocab_size = len(vocab) num_batches = len(numericalized_tokens) // (batch_size * seq_length) if len(numericalized_tokens) < (batch_size * seq_length): raise ValueError(f"Dataset is too small for the chosen batch size and sequence length. Tokens available: {len(numericalized_tokens)}") # Calculate the number of batches and ensure it’s not zero if num_batches == 0: raise ValueError(f"Number of batches is zero. 
Check if your dataset is large enough or consider reducing batch size/sequence length.") numericalized_tokens = numericalized_tokens[:num_batches * batch_size * seq_length] input_sequence = torch.tensor(numericalized_tokens).view(batch_size, -1) # Define the model num_experts = 5 state_dim = 256 hidden_dim = 256 input_dim = vocab_size model = StateSpaceMamba(input_dim, state_dim, num_experts, hidden_dim) learning_rate = 0.001 # Training loop print("Training model...") def train(model, data, num_batches, batch_size, seq_length, vocab_size, epochs): model.train() loss_fn = nn.CrossEntropyLoss() optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate) loss_history = [] for epoch in tqdm(range(epochs)): total_loss = 0.0 for i in range(0, data.size(1) - seq_length, seq_length): # Corrected to batch inputs and targets inputs = data[:, i:i+seq_length] targets = data[:, i+1:i+1+seq_length].reshape(-1) optimizer.zero_grad() outputs = model(inputs) loss = loss_fn(outputs.reshape(-1, vocab_size), targets) loss.backward() optimizer.step() total_loss += loss.item() epoch_loss = total_loss / num_batches loss_history.append(epoch_loss) print(f"Epoch {epoch}, Loss: {epoch_loss}") return loss_history epochs = 10 loss_history = train(model, input_sequence, num_batches, batch_size, seq_length, vocab_size, epochs) # Plot loss history plt.plot(loss_history) plt.xlabel("Epochs") plt.ylabel("Loss") plt.title("Training Loss History") plt.show() # Generate function print("Generating text...") def generate(model, start_seq, length, temperature=0.8): model.eval() state = model.state numericalized_start = torch.tensor([vocab[token] for token in tokenizer(start_seq.lower())], dtype=torch.long).unsqueeze(0) input_seq = numericalized_start generated_sequence = [] for i in range(length): output = model(input_seq) # Sample from the network as a multinomial distribution output_prob = F.softmax(output[:, -1] / temperature, dim=-1).detach() predicted_index = torch.multinomial(output_prob, 
1).item() generated_sequence.append(predicted_index) input_seq = torch.cat((input_seq, torch.tensor([[predicted_index]])), dim=1) return " ".join(vocab.lookup_tokens(generated_sequence)) # Generating a sample start_seq = "The fundamental concepts of physics" # Start of sequence to begin text generation generated_text = generate(model, start_seq, length=100) # Length of generated sequence print(generated_text)
70f6143d1d9b51429969a3c1750c8b95
{ "intermediate": 0.35456427931785583, "beginner": 0.3427734375, "expert": 0.3026622235774994 }
37,702
Traceback (most recent call last): File "/home/alejandro/Documents/projects/forks/postoga/./postoga.py", line 262, in <module> main() File "/home/alejandro/Documents/projects/forks/postoga/./postoga.py", line 258, in main master.run() File "/home/alejandro/Documents/projects/forks/postoga/./postoga.py", line 84, in run self.table = query_table(self.path) File "/home/alejandro/Documents/projects/forks/postoga/modules/make_query_table.py", line 47, in query_table loss["helper"] = loss["transcript"].str.rsplit(".", 1).str[0] File "/home/alejandro/miniconda3/envs/postoga/lib/python3.10/site-packages/pandas/core/strings/accessor.py", line 129, in wrapper return func(self, *args, **kwargs) TypeError: StringMethods.rsplit() takes from 1 to 2 positional arguments but 3 were given
83be10b3be310f842e8f4e04b43c2f0e
{ "intermediate": 0.36930525302886963, "beginner": 0.4392514228820801, "expert": 0.19144335389137268 }
37,703
give names for a tool that creates mysql sql query indexes automatically
9df74db4efa1334241b8783aff95071d
{ "intermediate": 0.44437673687934875, "beginner": 0.3289518654346466, "expert": 0.22667139768600464 }
37,704
maintenant le problème c’est que lorsqu’on clique sur un artiste appartenant a un groupe et qu’on fait retour, toutes les cardes avec l’id de ce groupe font retour arrière aussi, il faut différencier la card avec quelque chose en plus que l’id groupe : import { useEffect, useState, useRef, useLayoutEffect } from “react”; import SearchBar from “…/…/components/form/SearchBar”; import Combo from “…/…/components/form/Combo”; import CarteArtiste from “…/…/components/Artiste/CarteProgrammation”; import { motion } from “framer-motion”; import { useLocation } from “react-router-dom”; import axios from “axios”; import Footer from “…/…/components/footer”; import CarteProgrammation from “…/…/components/Artiste/CarteProgrammation”; import { AnimatePresence } from “framer-motion”; type Props = { isNavInFocus: boolean; setIsNavTransparent: (isNavTransparent: boolean) => void; }; type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; }; type Artiste = { descriptionA?: string; idMG?: number; idG: number; nomDeSceneMG: string; nomMG: string; prenomMG?: string; datePassage?: string; heurePassage?: string; }; type Evenement = { dateDebutE: string; dateFinE: string; heureDebutE: string; heureFinE: string; idE: number; idG: number; idL: number | null; nomE: string; }; type Programme = Groupe | Evenement | Artiste; export default function Programmation(props: Props) { const location = useLocation(); const idArtistComingFrom = location.state?.comesFromPageArtist; const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [searchTerm, setSearchTerm] = useState(“”); window.history.replaceState({}, document.title); const [lesGroupes, setLesGroupes] = useState<Groupe[]>(location.state ? 
oldGroupes : []); const [lesArtistes, setLesArtistes] = useState<Artiste[]>([]); const groupePassageMap = useRef< Map<number, { datePassage: string; heurePassage: string }> >(new Map()); useEffect(() => { axios.get(“http://localhost:8080/getGroupesWithEvenements”).then((res) => { const groupedData = res.data as Programme[][]; console.log(“groupedData”) console.log(groupedData); const listeGroupes: Groupe[] = []; const groupSet = new Set<number>(); const listeArtistes: Artiste[] = []; groupedData.forEach((groupArray) => { let groupeObj: Partial<Groupe> = {}; let artisteObj: Partial<Artiste> = {}; groupArray.forEach((item) => { if (“nomG” in item) { groupeObj = { …groupeObj, …item }; } else if (“nomDeSceneMG” in item) { artisteObj = { …artisteObj, …item }; } else if (“dateDebutE” in item) { const datePassage = item.dateDebutE; const heurePassage = item.heureDebutE; if (groupeObj.idG === item.idG) { groupeObj.datePassage = datePassage; groupeObj.heurePassage = heurePassage; } if (artisteObj.idG === item.idG) { artisteObj.datePassage = datePassage; artisteObj.heurePassage = heurePassage; } } }); // si l’id n’a pas encore était ajoutée à la liste on ajoute le groupe if (groupeObj.idG !== undefined && !groupSet.has(groupeObj.idG)) { listeGroupes.push(groupeObj as Groupe); groupSet.add(groupeObj.idG); } if (artisteObj.nomDeSceneMG !== undefined) { listeArtistes.push(artisteObj as Artiste); } }); setLesGroupes(listeGroupes); // console.log(listeGroupes); groupePassageMap.current.clear(); listeGroupes.forEach((groupe) => { groupePassageMap.current.set(groupe.idG, { datePassage: groupe.datePassage, heurePassage: groupe.heurePassage, }); }); setLesArtistes(listeArtistes); }); }, []); // let filteredGroupes = lesGroupes; // if (searchTerm) { // filteredGroupes = lesGroupes.filter((groupe) => // groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()) // ); // } // let filteredArtistes = lesArtistes; // if (searchTerm) { // filteredArtistes = lesArtistes.filter((artiste) => 
// artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()) // ) // } const [filtreDate, setFiltreDate] = useState(“Tout”); console.log(filtreDate) let termeRechercher = ‘Tout’; if (filtreDate === ‘21 Juillet’) { termeRechercher = ‘07-21’; } else if (filtreDate === ‘22 Juillet’) { termeRechercher = ‘07-22’; } else if (filtreDate === ‘23 Juillet’) { termeRechercher = ‘07-23’; } let filteredGroupes = lesGroupes; if (searchTerm) { filteredGroupes = filteredGroupes.filter((groupe) => groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()) ); } if (filtreDate !== ‘Tout’) { console.log(termeRechercher) filteredGroupes = filteredGroupes.filter((groupe) => { console.log(groupe.datePassage); return groupe.datePassage.includes(termeRechercher); }); } let filteredArtistes = lesArtistes; if (searchTerm) { filteredArtistes = filteredArtistes.filter((artiste) => artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()) ); } if (filtreDate !== ‘Tout’) { filteredArtistes = filteredArtistes.filter((artiste) => { const groupeInfo = groupePassageMap.current.get(artiste.idG); return groupeInfo?.datePassage.includes(termeRechercher); }); } console.log(filteredGroupes); console.log(filteredArtistes); const [filtreAffichage, setFiltreAffichage] = useState(“Grille”); const [filtreGenre, setFiltreGenre] = useState(“Tout”); const pageRef = useRef<HTMLDivElement>(null); const contentVariants = { visible: { filter: “blur(0px)”, scale: 1, zIndex: 1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, hidden: { filter: “blur(10px)”, scale: 0.8, zIndex: -1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, }; useEffect(() => { window.scrollTo(0, 0); props.setIsNavTransparent(false); }, []); return ( <> <motion.div id=“Programmation” className=“page-defaut” variants={contentVariants} animate={props.isNavInFocus ? 
“hidden” : “visible”} ref={pageRef} > <header> <div className=“title”> <h2>PROGRAMMATION</h2> <svg width=“64” height=“64” viewBox=“0 0 64 64” fill=“none” xmlns=“http://www.w3.org/2000/svg” > <path d=“M62.9991 27.739L42.1815 27.7675L56.8787 13.0286L50.7001 6.86056L36.0029 21.5994L35.9744 0.785744L27.2406 0.797718L27.2692 21.6114L12.5316 6.91288L6.36413 13.0979L21.1017 27.7964L0.289932 27.825L0.301899 36.5537L21.1137 36.5251L6.41646 51.2641L12.6009 57.4321L27.2981 42.6932L27.3266 63.5069L36.0603 63.4949L36.0318 42.6812L50.7694 57.3798L56.931 51.1948L42.1934 36.4962L63.011 36.4677L62.9991 27.739Z” fill=“#FFD600” /> </svg> </div> <div className=“filters-container”> <div className=“filters”> <Combo title=“DATE” choices={[“Tout”, “21 Juillet”, “22 Juillet”, “23 Juillet”]} currentChoice={filtreDate} setCurrentChoice={setFiltreDate} /> <Combo title=“AFFICHAGE” choices={[“Grille”, “Horaires”]} currentChoice={filtreAffichage} setCurrentChoice={setFiltreAffichage} /> <Combo title=“GENRE” choices={[“Tout”, “Rap”, “Rock”, “Pop”]} currentChoice={filtreGenre} setCurrentChoice={setFiltreGenre} /> </div> <SearchBar text=“Rechercher un artiste” onSearch={setSearchTerm} /> </div> </header> <main className=“liste-artistes”> <AnimatePresence> {filteredGroupes.map((groupe) => { console.log(‘idArtistComingFrom for groupe:’, idArtistComingFrom); console.log(“groupe.idG : “); console.log(groupe.idG); return ( <CarteProgrammation key={groupe.idG} id={groupe.idG} nomArtiste={groupe.nomG} description={groupe.descriptionG} date={groupe.datePassage} heure={groupe.heurePassage} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == groupe.idG ? oldX : null} oldY={idArtistComingFrom == groupe.idG ? 
oldY : null} comesFromPageArtist={idArtistComingFrom == groupe.idG} /> ); })} {filteredArtistes.map((artiste) => { console.log(‘idArtistComingFrom for artiste:’, idArtistComingFrom); const groupeInfo = groupePassageMap.current.get(artiste.idG); console.log(“artiste.idMG : “); console.log(artiste.idMG); return ( <CarteProgrammation key={artiste.idG} id={artiste.idG} nomArtiste={artiste.nomDeSceneMG} description={artiste.descriptionA} date={groupeInfo?.datePassage ?? “Date inconnue”} heure={groupeInfo?.heurePassage ?? “Heure inconnue”} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == artiste.idG ? oldX : null} oldY={idArtistComingFrom == artiste.idG ? oldY : null} comesFromPageArtist={idArtistComingFrom == artiste.idG} /> ); })} </AnimatePresence> </main> </motion.div> <Footer /> </> ); } import { motion } from ‘framer-motion’ import {useState,useRef, useEffect} from ‘react’ import { Link } from ‘react-router-dom’ type Props = { id:number|undefined, date: string, heure: string, nomArtiste: string, setIsNavTransparent: (isNavTransparent : boolean) => void; comesFromPageArtist?: boolean; oldX?: number; oldY?: number; oldGroupes?: Groupe[]; description?: string; } type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; } export default function CarteProgrammation(props: Props) { const nomArtiste = props.nomArtiste.toUpperCase().split(” “) const[isHovered, setIsHovered] = useState(false) const[isSwitching, setIsSwitching] = useState(false) const[delay, setDelay] = useState(props.comesFromPageArtist? 0.2 : 0) const refCarte = useRef<HTMLDivElement>(null); const[zIndexCard, setZIndexCard] = useState(props.comesFromPageArtist ? 
99 : 1) useEffect(() => { console.log(‘oldX:’, props.oldX); console.log(‘oldY:’, props.oldY); setTimeout(() => { console.log(props.date) console.log(props.heure) setZIndexCard(1) setDelay(0) }, 600); }, []) const titleVariants = { hover:{ fontSize: “2.3rem”, color:”#FFD600”, transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ fontSize: “2.3rem”, color:”#FFFBEE”, transition:{ delay:delay, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ fontSize: “7.625rem”, color:“#FFD600”, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } // fais le variant pour l’image de l’artiste (scale 1.2 sur hover) const imageVariants = { hover:{ scale:1.3, transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ scale:1, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } // fais le variant pour le texte de dateHeure (x 2rem sur hover) const dateHeureVariants = { hover:{ y:“0rem”, fontSize: “1.875rem”, transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ y:(window.innerWidth <= 576 ? 3.1 : 2.9).toString() + “rem”, fontSize: “1.875rem”, transition:{ delay:delay, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ y:“0rem”, fontSize: “4.6875rem”, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } // default font-size: 1.875rem; // exit font-size: 4.6875rem; const heureVariants = { hover:{ transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ transition:{ duration:0.4, ease: [1, 0, 0,1] } }, } const carteVariants = { default:{ zIndex:zIndexCard, width: “24rem”, height: “15.5rem”, y:0, x:0, transition:{ delay:0.2, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ x: refCarte.current? -refCarte.current.offsetLeft : props.oldX? -props.oldX : 0, y: refCarte.current? -refCarte.current.offsetTop : props.oldY? 
-props.oldY : 0, height: “100vh”, width: “100vw”, zIndex:99, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } const textVariants = { default:{ padding: “0.5rem 1rem”, transition:{ delay:delay2, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ padding: “3rem”, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } return ( <motion.div className=“outer-carte-artiste” ref={refCarte} variants={carteVariants} initial={props.comesFromPageArtist ? “exit” : “default”} animate=“default” exit={isSwitching ? “exit” : “default”} onClick={() => window.scrollTo(0,0)} > <Link className=“carte-artiste” onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} to={{ pathname:“/artiste”, search:?id=${props.id}, }} state={{ nomArtiste: props.nomArtiste, date: props.date, heure: props.heure, oldX: refCarte.current?.offsetLeft, oldY: refCarte.current?.offsetTop, oldGroupes: props.oldGroupes, description: props.description }} onClick={() => {props.setIsNavTransparent(true); setIsSwitching(true)}} > <motion.img src={“http://localhost:8080/getImageArtiste/” + props.id} alt=“image de l’artiste” variants={imageVariants} initial=“default” animate={isHovered ? “hover” : “default”} exit=“default” /> <motion.div className=“texts” variants={textVariants} initial={props.comesFromPageArtist ? “exit” : “default”} animate=“default” exit={isSwitching ? “exit” : “default”} > <motion.h3 variants={titleVariants} initial={props.comesFromPageArtist ? “exit” : “default”} animate={isHovered ? “hover” : “default”} exit={isSwitching ? “exit” : “default”} >{ nomArtiste.map((mot, index) => { return( <span key={index}>{mot}<br/></span> ) }) }</motion.h3> <motion.div className=“date-heure” variants={dateHeureVariants} initial={props.comesFromPageArtist ? “exit” : “default”} animate={isHovered ? “hover” : “default”} exit={isSwitching ? “exit” : “default”} > <h4 >{props.date}</h4> <motion.h4 variants={heureVariants} initial=“default” animate={isHovered ? “hover” : “default”} exit={isSwitching ? 
“exit” : “default”} >{props.heure}</motion.h4> </motion.div> </motion.div> </Link> </motion.div> ) } import { useEffect, useState, useRef } from “react”; import { Link, useLocation } from “react-router-dom”; import BoutonReseau from “…/…/components/Artiste/BoutonReseau”; import { motion } from “framer-motion”; import axios from “axios”; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || “Description par défaut si aucune description n’est passée.” ); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get(“id”); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<“top” | “bottom”>( “top” ); function getSocialNetworkType( url: string ): | “soundcloud” | “spotify” | “instagram” | “twitter” | “youtube” | “inconnu” { if (url.includes(“soundcloud”)) { return “soundcloud”; } else if (url.includes(“spotify”)) { return “spotify”; } else if (url.includes(“instagram”)) { return “instagram”; } else if (url.includes(“twitter”)) { return “twitter”; } else if (url.includes(“youtube”)) { return “youtube”; } else { return “inconnu”; } } useEffect(() => { axios .get(“http://localhost:8080/getSocialLinks/” + idArtiste) .then((response) => { setSocialLinks(response.data); }) .catch((error) => { console.error( “Erreur lors de la récupération des liens de réseaux sociaux”, error ); }); const handleResize = () => { setWindowWidth(window.innerWidth); }; 
window.addEventListener(“resize”, handleResize); return () => { window.removeEventListener(“resize”, handleResize); }; }, []); useEffect(() => { console.log(‘oldXEvenement:’, oldX); console.log(‘oldYEvenement:’, oldY); if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 23rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition(“top”); } else { setInfosGridPosition(“bottom”); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: “easeInOut”, }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: “easeInOut”, }, }, }; const overlayVariants = { initial: { background: “linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)”, transition: { duration: 0.1, ease: “easeInOut”, }, }, extended: { background: “linear-gradient(to top, rgba(0, 0, 0, 0.7) 40%, rgba(0, 0, 0, 0) 100%)”, transition: { delay: 0.6, duration: 0.6, ease: “easeInOut”, }, }, }; return ( <div id=“PageArtiste”> <motion.div className=“overlay” variants={overlayVariants} initial=“initial” animate={infosGridPosition === “top” ? “extended” : “initial”} /> <img src={“http://localhost:8080/getImageArtiste/” + idArtiste} alt=“image de fond” /> <div className=“content” style={{ columnGap: infosGridPosition === “top” ? “0” : “5rem”, rowGap: windowWidth > 991 ? 
“0” : “5rem”, }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" “) .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className=“infos” variants={infosVariants} initial=“initial” animate=“visible” exit=“initial” style={{ gridArea: infosGridPosition }} > <p className=“description”>{description}</p> <div className=“les-reseaux”> {socialLinks.map((link, index) => { const type = getSocialNetworkType(link.reseau); if (type) { return ( <BoutonReseau key={index} href={link.reseau} type={type} /> ); } return null; })} </div> <Link to=”/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className=“btn-retour” > <svg width=“36” height=“28” viewBox=“0 0 36 28” fill=“none” xmlns=“http://www.w3.org/2000/svg” > <rect x=“6.52539” y=“0.321533” width=“35.8974” height=“3.58974” rx=“1.79487” transform=“rotate(45 6.52539 0.321533)” /> <rect x=“3.87891” y=“25.5957” width=“35.8974” height=“3.58974” rx=“1.79487” transform=“rotate(-45 3.87891 25.5957)” /> </svg> </Link> </motion.div> <div className=“date-heure”> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); }
811e1e9af26f908a399030d1f58ac513
{ "intermediate": 0.4528301954269409, "beginner": 0.40186312794685364, "expert": 0.14530660212039948 }
37,705
import os from tkinter import Tk, Button, Label, filedialog, messagebox, IntVar, Checkbutton, Spinbox # Assuming Image importing from PIL is already done (if needed) class ImageBrowser: def __init__(self, master): self.master = master self.master.title("Image Browser") self.history_pool = [] # This will be the shuffled list of images # Initialize variables self.img_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp'] self.file_index = [] self.current_index = 0 self.image_folder = "" self.limit_reached = False # Create widgets self.button_open = Button(master, text="Open Image Folder", command=self.open_image_folder) self.button_open.pack() self.button_previous = Button(master, text="Previous Image", command=self.previous_image) self.button_previous.pack(side='left') self.button_next = Button(master, text="Next Image", command=self.next_image) self.button_next.pack(side='right') self.limit_var = IntVar() self.limit_checkbox = Checkbutton(master, text="Limit", variable=self.limit_var) self.limit_checkbox.pack() self.limit_spinbox = Spinbox(master, from_=1, to=100, width=5) self.limit_spinbox.pack() # Image info labels self.label_total_img = Label(master, text="Total Images: 0") self.label_total_img.pack() self.label_img_count = Label(master, text="Image Count: 0") self.label_img_count.pack() self.label_img_name = Label(master, text="Image Name: -") self.label_img_name.pack() def open_image_folder(self): folder_path = filedialog.askdirectory() if folder_path: self.image_folder = folder_path self.file_index = [ os.path.join(self.image_folder, f) for f in os.listdir(self.image_folder) if os.path.splitext(f)[1].lower() in self.img_EXTENSIONS ] self.file_index.sort() if not self.file_index: messagebox.showinfo("No Images", "No images found in the selected folder.") return self.current_index = 0 self.limit_reached = False self.label_total_img.config(text=f"Total Images: {len(self.file_index)}") self.update_image_info() elif not self.file_index: 
messagebox.showinfo("No Folder", "Please select a folder to start browsing images.") def next_image(self): if not self.file_index: messagebox.showinfo("No Images", "Please open an image folder first.") return next_count = (self.current_index + 1) % len(self.file_index) if self.limit_var.get(): limit = int(self.limit_spinbox.get()) if self.limit_reached: self.limit_reached = False self.current_index = next_count elif next_count % limit == 0: # Limit is reached self.label_img_name.config(text="End of limit") self.limit_reached = True self.label_img_count.config(text=f"Image Count: {next_count}") return self.current_index = next_count self.update_image_info() def previous_image(self): if not self.file_index: messagebox.showinfo("No Images", "Please open an image folder first.") return self.limit_reached = False # View the previous image regardless of limit self.current_index = (self.current_index - 1) % len(self.file_index) self.update_image_info() def update_image_info(self): current_img_path = self.file_index[self.current_index] img_name = os.path.basename(current_img_path) self.label_img_name.config(text=f"Image Name: {img_name}") self.label_img_count.config(text=f"Image Count: {self.current_index + 1}") # You would add logic here to actually display the image, if desired if __name__ == "__main__": root = Tk() image_browser = ImageBrowser(root) root.mainloop() when i press open image, set file index and call an image from the index.put it in the history pool. make the program calls from the history pool. make the next buttton calls random image number and add it to the pool. when i press previous, make it call from the history pool. make the next button call from the pool until it reach the bottom of the pool then add new random image to the pool from the index. keep the limit button showing an end limit text for every (limit number) make it so that the next button and previous button will show every image in the folder once before starting another cycle.
0126d88cedececca6c7c127a1c423711
{ "intermediate": 0.39883655309677124, "beginner": 0.4079921841621399, "expert": 0.19317124783992767 }
37,706
How do I convert an Assimp aiMatrix4x4 to a float[16]?
331ae05a2ef3863107c251da0c9830b5
{ "intermediate": 0.408658891916275, "beginner": 0.15129533410072327, "expert": 0.4400458037853241 }
37,707
import os import random from tkinter import Tk, Button, Label, filedialog, messagebox, IntVar, Spinbox, Checkbutton class ImageBrowser: def __init__(self, master): self.master = master self.master.title("Image Browser") self.img_EXTENSIONS = ['.jpg', '.jpeg', '.png', '.gif', '.bmp', '.webp'] self.image_pool = [] self.image_history = [] # Additional variable to know if we added new random image in the history self.new_random_image_added = False self.current_index = -1 self.image_folder = "" self.limit_message_shown = False self.button_open = Button(master, text="Open Image Folder", command=self.open_image_folder) self.button_open.pack() self.button_previous = Button(master, text="Previous Image", command=self.previous_image) self.button_previous.pack(side='left') self.button_next = Button(master, text="Next Image", command=self.next_image) self.button_next.pack(side='right') self.limit_enabled = IntVar() self.limit_checkbutton = Checkbutton(master, text="Limit", variable=self.limit_enabled) self.limit_checkbutton.pack(side='left') self.limit_var = IntVar(value=1) self.limit_spinbox = Spinbox(master, from_=1, to=100, width=5, textvariable=self.limit_var) self.limit_spinbox.pack(side='left') self.label_img_name = Label(master, text="Image Name: -") self.label_img_name.pack() def open_image_folder(self): folder_path = filedialog.askdirectory() if folder_path: self.image_folder = folder_path self.image_pool = [ os.path.join(self.image_folder, f) for f in os.listdir(self.image_folder) if os.path.splitext(f)[1].lower() in self.img_EXTENSIONS ] random.shuffle(self.image_pool) self.show_image(0) def next_image(self): if not self.image_pool: messagebox.showinfo("No Images", "Please open an image folder first.") return if self.limit_enabled.get() and len(self.image_history) % self.limit_var.get() == 0 and self.image_history and not self.new_random_image_added: self.label_img_name.config(text="End of limit") return # If we are within the history boundaries, proceed with the 
next image in the history if self.current_index < len(self.image_history) - 1 and not self.new_random_image_added: self.current_index += 1 else: # Add a new random image self.current_index = self.get_random_unseen_image_index() self.image_history.append(self.current_index) self.new_random_image_added = True self.show_image(self.current_index) def previous_image(self): if not self.image_pool: messagebox.showinfo("No Images", "Please open an image folder first.") return if self.image_history and self.current_index > 0: # Go back in history without adding new random images self.current_index -= 1 self.new_random_image_added = False self.show_image(self.current_index) elif self.image_history: # Add a new random image that hasn't been shown to the history new_image_index = self.get_random_unseen_image_index() self.image_history.insert(0, new_image_index) self.current_index = 0 # The new image is now the current self.show_image(self.current_index) else: messagebox.showinfo("No History", "No images in previous history.") def get_random_unseen_image_index(self): available_indices = set(range(len(self.image_pool))) - set(self.image_history) if available_indices: return random.choice(list(available_indices)) else: # If all images have been seen, reshuffle the pool self.image_history = [] random.shuffle(self.image_pool) return 0 # Begin from the start of the pool def show_image(self, index): img_name = os.path.basename(self.image_pool[index]) self.label_img_name.config(text=f"Image Name: {img_name}") # Logic to display the image goes here (not shown in this snippet) if __name__ == "__main__": root = Tk() image_browser = ImageBrowser(root) root.mainloop() "new" why the next and previous seems disconnected. make the next image call from the history unless it's reached the end, then add new random image that hasn't been called. 
and the previous call the previously shown images unless it reaches the beginning (top of the img history) then add a new random image that hasn't been called at the top of the history, so that if i press next i go through the history first then get a new random image. and if i press previous i can see all previously called images, and when everytime reach the top of the history of shown images it adds a new random image that hasn't been called at the top of the history
2c4634288ec0dcfe627f845b92f1b385
{ "intermediate": 0.40088510513305664, "beginner": 0.4684285521507263, "expert": 0.13068631291389465 }
37,708
pourquoi les réseaux sociaux s'affichent 2 fois ? import { useEffect, useState, useRef, useLayoutEffect } from "react"; import SearchBar from "../../components/form/SearchBar"; import Combo from "../../components/form/Combo"; import CarteArtiste from "../../components/Artiste/CarteProgrammation"; import { motion } from "framer-motion"; import { useLocation } from "react-router-dom"; import axios from "axios"; import Footer from "../../components/footer"; import CarteProgrammation from "../../components/Artiste/CarteProgrammation"; import { AnimatePresence } from "framer-motion"; type Props = { isNavInFocus: boolean; setIsNavTransparent: (isNavTransparent: boolean) => void; }; type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; }; type Artiste = { descriptionA?: string; idMG?: number; idG: number; nomDeSceneMG: string; nomMG: string; prenomMG?: string; datePassage?: string; heurePassage?: string; }; type Evenement = { dateDebutE: string; dateFinE: string; heureDebutE: string; heureFinE: string; idE: number; idG: number; idL: number | null; nomE: string; }; type Programme = Groupe | Evenement | Artiste; export default function Programmation(props: Props) { const location = useLocation(); const idArtistComingFrom = location.state?.comesFromPageArtist; const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [searchTerm, setSearchTerm] = useState(""); window.history.replaceState({}, document.title); const [lesGroupes, setLesGroupes] = useState<Groupe[]>(location.state ? 
oldGroupes : []); const [lesArtistes, setLesArtistes] = useState<Artiste[]>([]); const groupePassageMap = useRef< Map<number, { datePassage: string; heurePassage: string }> >(new Map()); useEffect(() => { axios.get("http://localhost:8080/getGroupesWithEvenements").then((res) => { const groupedData = res.data as Programme[][]; // console.log("groupedData") // console.log(groupedData); const listeGroupes: Groupe[] = []; const groupSet = new Set<number>(); const listeArtistes: Artiste[] = []; groupedData.forEach((groupArray) => { let groupeObj: Partial<Groupe> = {}; let artisteObj: Partial<Artiste> = {}; groupArray.forEach((item) => { if ("nomG" in item) { groupeObj = { ...groupeObj, ...item }; } else if ("nomDeSceneMG" in item) { artisteObj = { ...artisteObj, ...item }; } else if ("dateDebutE" in item) { const datePassage = item.dateDebutE; const heurePassage = item.heureDebutE; if (groupeObj.idG === item.idG) { groupeObj.datePassage = datePassage; groupeObj.heurePassage = heurePassage; } if (artisteObj.idG === item.idG) { artisteObj.datePassage = datePassage; artisteObj.heurePassage = heurePassage; } } }); // si l'id n'a pas encore était ajoutée à la liste on ajoute le groupe if (groupeObj.idG !== undefined && !groupSet.has(groupeObj.idG)) { listeGroupes.push(groupeObj as Groupe); groupSet.add(groupeObj.idG); } if (artisteObj.nomDeSceneMG !== undefined) { listeArtistes.push(artisteObj as Artiste); } }); setLesGroupes(listeGroupes); // console.log(listeGroupes); groupePassageMap.current.clear(); listeGroupes.forEach((groupe) => { groupePassageMap.current.set(groupe.idG, { datePassage: groupe.datePassage, heurePassage: groupe.heurePassage, }); }); setLesArtistes(listeArtistes); }); }, []); // let filteredGroupes = lesGroupes; // if (searchTerm) { // filteredGroupes = lesGroupes.filter((groupe) => // groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()) // ); // } // let filteredArtistes = lesArtistes; // if (searchTerm) { // filteredArtistes = 
lesArtistes.filter((artiste) => // artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()) // ) // } const [filtreDate, setFiltreDate] = useState("Tout"); console.log(filtreDate) let termeRechercher = 'Tout'; if (filtreDate === '21 Juillet') { termeRechercher = '07-21'; } else if (filtreDate === '22 Juillet') { termeRechercher = '07-22'; } else if (filtreDate === '23 Juillet') { termeRechercher = '07-23'; } let filteredGroupes = lesGroupes; if (searchTerm) { filteredGroupes = filteredGroupes.filter((groupe) => { console.log(groupe.nomG); if (searchTerm.length > 5) { return groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return groupe.nomG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== 'Tout') { console.log(termeRechercher) filteredGroupes = filteredGroupes.filter((groupe) => { return groupe.datePassage.includes(termeRechercher); }); } let filteredArtistes = lesArtistes; console.log("Les Artistes avant filtre:", lesArtistes); if (searchTerm) { console.log("Filtered Artistes après searchTerm:", filteredArtistes); filteredArtistes = filteredArtistes.filter((artiste) => { if (searchTerm.length > 5) { return artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return artiste.nomDeSceneMG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== 'Tout') { filteredArtistes = filteredArtistes.filter((artiste) => { console.log("Filtered Artistes après searchTerm:", filteredArtistes); const groupeInfo = groupePassageMap.current.get(artiste.idG); return groupeInfo?.datePassage.includes(termeRechercher); }); } console.log(filteredGroupes); console.log(filteredArtistes); const [filtreAffichage, setFiltreAffichage] = useState("Grille"); const [filtreGenre, setFiltreGenre] = useState("Tout"); const pageRef = useRef<HTMLDivElement>(null); const contentVariants = { visible: { filter: "blur(0px)", scale: 1, zIndex: 1, transition: { duration: 0.5, ease: [1, 0, 0, 1], 
}, }, hidden: { filter: "blur(10px)", scale: 0.8, zIndex: -1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, }; useEffect(() => { window.scrollTo(0, 0); props.setIsNavTransparent(false); }, []); return ( <> <motion.div id="Programmation" className="page-defaut" variants={contentVariants} animate={props.isNavInFocus ? "hidden" : "visible"} ref={pageRef} > <header> <div className="title"> <h2>PROGRAMMATION</h2> <svg width="64" height="64" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg" > <path d="M62.9991 27.739L42.1815 27.7675L56.8787 13.0286L50.7001 6.86056L36.0029 21.5994L35.9744 0.785744L27.2406 0.797718L27.2692 21.6114L12.5316 6.91288L6.36413 13.0979L21.1017 27.7964L0.289932 27.825L0.301899 36.5537L21.1137 36.5251L6.41646 51.2641L12.6009 57.4321L27.2981 42.6932L27.3266 63.5069L36.0603 63.4949L36.0318 42.6812L50.7694 57.3798L56.931 51.1948L42.1934 36.4962L63.011 36.4677L62.9991 27.739Z" fill="#FFD600" /> </svg> </div> <div className="filters-container"> <div className="filters"> <Combo title="DATE" choices={["Tout", "21 Juillet", "22 Juillet", "23 Juillet"]} currentChoice={filtreDate} setCurrentChoice={setFiltreDate} /> <Combo title="AFFICHAGE" choices={["Grille", "Horaires"]} currentChoice={filtreAffichage} setCurrentChoice={setFiltreAffichage} /> <Combo title="GENRE" choices={["Tout", "Rap", "Rock", "Pop"]} currentChoice={filtreGenre} setCurrentChoice={setFiltreGenre} /> </div> <SearchBar text="Rechercher un artiste" onSearch={setSearchTerm} /> </div> </header> <main className="liste-artistes"> <AnimatePresence> {filteredGroupes.map((groupe) => { console.log('idArtistComingFrom for groupe:', idArtistComingFrom); console.log("groupe.idG : "); console.log(groupe.idG); return ( <CarteProgrammation key={groupe.idG} id={groupe.idG} nomArtiste={groupe.nomG} description={groupe.descriptionG} date={groupe.datePassage} heure={groupe.heurePassage} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom 
== groupe.idG ? oldX : null} oldY={idArtistComingFrom == groupe.idG ? oldY : null} comesFromPageArtist={idArtistComingFrom == groupe.idG} /> ); })} {filteredArtistes.map((artiste) => { console.log('idArtistComingFrom for artiste:', idArtistComingFrom); const groupeInfo = groupePassageMap.current.get(artiste.idG); console.log("artiste.idMG : "); console.log(artiste.idMG); return ( <CarteProgrammation key={artiste.idMG} id={artiste.idMG} nomArtiste={artiste.nomDeSceneMG} description={artiste.descriptionA} date={groupeInfo?.datePassage ?? "Date inconnue"} heure={groupeInfo?.heurePassage ?? "Heure inconnue"} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == artiste.idMG ? oldX : null} oldY={idArtistComingFrom == artiste.idMG ? oldY : null} comesFromPageArtist={idArtistComingFrom == artiste.idMG} /> ); })} </AnimatePresence> </main> </motion.div> <Footer /> </> ); } import { motion } from 'framer-motion' import {useState,useRef, useEffect} from 'react' import { Link } from 'react-router-dom' type Props = { id:number|undefined, date: string, heure: string, nomArtiste: string, setIsNavTransparent: (isNavTransparent : boolean) => void; comesFromPageArtist?: boolean; oldX?: number; oldY?: number; oldGroupes?: Groupe[]; description?: string; } type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; } export default function CarteProgrammation(props: Props) { const nomArtiste = props.nomArtiste.toUpperCase().split(" ") const[isHovered, setIsHovered] = useState(false) const[isSwitching, setIsSwitching] = useState(false) const[delay, setDelay] = useState(props.comesFromPageArtist? 0.2 : 0) const refCarte = useRef<HTMLDivElement>(null); const[zIndexCard, setZIndexCard] = useState(props.comesFromPageArtist ? 
99 : 1) useEffect(() => { console.log('oldX:', props.oldX); console.log('oldY:', props.oldY); setTimeout(() => { console.log(props.date) console.log(props.heure) setZIndexCard(1) setDelay(0) }, 600); }, []) const titleVariants = { hover:{ fontSize: "2.3rem", color:"#FFD600", transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ fontSize: "2.3rem", color:"#FFFBEE", transition:{ delay:delay, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ fontSize: "7.625rem", color:"#FFD600", transition:{ duration:0.4, ease: [1, 0, 0,1] } } } // fais le variant pour l'image de l'artiste (scale 1.2 sur hover) const imageVariants = { hover:{ scale:1.3, transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ scale:1, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } // fais le variant pour le texte de dateHeure (x 2rem sur hover) const dateHeureVariants = { hover:{ y:"0rem", fontSize: "1.875rem", transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ y:(window.innerWidth <= 576 ? 3.1 : 2.9).toString() + "rem", fontSize: "1.875rem", transition:{ delay:delay, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ y:"0rem", fontSize: "4.6875rem", transition:{ duration:0.4, ease: [1, 0, 0,1] } } } // default font-size: 1.875rem; // exit font-size: 4.6875rem; const heureVariants = { hover:{ transition:{ duration:0.4, ease: [1, 0, 0,1] } }, default:{ transition:{ duration:0.4, ease: [1, 0, 0,1] } }, } const carteVariants = { default:{ zIndex:zIndexCard, width: "24rem", height: "15.5rem", y:0, x:0, transition:{ delay:0.2, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ x: refCarte.current? -refCarte.current.offsetLeft : props.oldX? -props.oldX : 0, y: refCarte.current? -refCarte.current.offsetTop : props.oldY? 
-props.oldY : 0, height: "100vh", width: "100vw", zIndex:99, transition:{ duration:0.4, ease: [1, 0, 0,1] } } } const textVariants = { default:{ padding: "0.5rem 1rem", transition:{ delay:delay*2, duration:0.4, ease: [1, 0, 0,1] } }, exit:{ padding: "3rem", transition:{ duration:0.4, ease: [1, 0, 0,1] } } } return ( <motion.div className="outer-carte-artiste" ref={refCarte} variants={carteVariants} initial={props.comesFromPageArtist ? "exit" : "default"} animate="default" exit={isSwitching ? "exit" : "default"} onClick={() => window.scrollTo(0,0)} > <Link className="carte-artiste" onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} to={{ pathname:"/artiste", search:`?id=${props.id}`, }} state={{ nomArtiste: props.nomArtiste, date: props.date, heure: props.heure, oldX: refCarte.current?.offsetLeft, oldY: refCarte.current?.offsetTop, oldGroupes: props.oldGroupes, description: props.description }} onClick={() => {props.setIsNavTransparent(true); setIsSwitching(true)}} > <motion.img src={"http://localhost:8080/getImageArtiste/" + props.id} alt="image de l'artiste" variants={imageVariants} initial="default" animate={isHovered ? "hover" : "default"} exit="default" /> <motion.div className="texts" variants={textVariants} initial={props.comesFromPageArtist ? "exit" : "default"} animate="default" exit={isSwitching ? "exit" : "default"} > <motion.h3 variants={titleVariants} initial={props.comesFromPageArtist ? "exit" : "default"} animate={isHovered ? "hover" : "default"} exit={isSwitching ? "exit" : "default"} >{ nomArtiste.map((mot, index) => { return( <span key={index}>{mot}<br/></span> ) }) }</motion.h3> <motion.div className="date-heure" variants={dateHeureVariants} initial={props.comesFromPageArtist ? "exit" : "default"} animate={isHovered ? "hover" : "default"} exit={isSwitching ? "exit" : "default"} > <h4 >{props.date}</h4> <motion.h4 variants={heureVariants} initial="default" animate={isHovered ? "hover" : "default"} exit={isSwitching ? 
"exit" : "default"} >{props.heure}</motion.h4> </motion.div> </motion.div> </Link> </motion.div> ) } import { useEffect, useState, useRef } from "react"; import { Link, useLocation } from "react-router-dom"; import BoutonReseau from "../../components/Artiste/BoutonReseau"; import { motion } from "framer-motion"; import axios from "axios"; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || "Description par défaut si aucune description n'est passée." ); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get("id"); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<"top" | "bottom">( "top" ); function getSocialNetworkType( url: string ): | "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube" | "inconnu" { if (url.includes("soundcloud")) { return "soundcloud"; } else if (url.includes("spotify")) { return "spotify"; } else if (url.includes("instagram")) { return "instagram"; } else if (url.includes("twitter")) { return "twitter"; } else if (url.includes("youtube")) { return "youtube"; } else { return "inconnu"; } } useEffect(() => { axios .get("http://localhost:8080/getSocialLinks/" + idArtiste) .then((response) => { setSocialLinks(response.data); }) .catch((error) => { console.error( "Erreur lors de la récupération des liens de réseaux sociaux", error ); }); const handleResize = () => { setWindowWidth(window.innerWidth); }; 
window.addEventListener("resize", handleResize); return () => { window.removeEventListener("resize", handleResize); }; }, []); useEffect(() => { console.log('oldXEvenement:', oldX); console.log('oldYEvenement:', oldY); if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 2*3rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition("top"); } else { setInfosGridPosition("bottom"); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: "easeInOut", }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: "easeInOut", }, }, }; const overlayVariants = { initial: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)", transition: { duration: 0.1, ease: "easeInOut", }, }, extended: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 40%, rgba(0, 0, 0, 0) 100%)", transition: { delay: 0.6, duration: 0.6, ease: "easeInOut", }, }, }; return ( <div id="PageArtiste"> <motion.div className="overlay" variants={overlayVariants} initial="initial" animate={infosGridPosition === "top" ? "extended" : "initial"} /> <img src={"http://localhost:8080/getImageArtiste/" + idArtiste} alt="image de fond" /> <div className="content" style={{ columnGap: infosGridPosition === "top" ? "0" : "5rem", rowGap: windowWidth > 991 ? 
"0" : "5rem", }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" ") .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className="infos" variants={infosVariants} initial="initial" animate="visible" exit="initial" style={{ gridArea: infosGridPosition }} > <p className="description">{description}</p> <div className="les-reseaux"> {socialLinks.map((link, index) => { const type = getSocialNetworkType(link.reseau); if (type) { return ( <BoutonReseau key={index} href={link.reseau} type={type} /> ); } return null; })} </div> <Link to="/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className="btn-retour" > <svg width="36" height="28" viewBox="0 0 36 28" fill="none" xmlns="http://www.w3.org/2000/svg" > <rect x="6.52539" y="0.321533" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(45 6.52539 0.321533)" /> <rect x="3.87891" y="25.5957" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(-45 3.87891 25.5957)" /> </svg> </Link> </motion.div> <div className="date-heure"> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); }
4016f4520e9d25a0915776c2914f62b9
{ "intermediate": 0.34211859107017517, "beginner": 0.385978102684021, "expert": 0.27190327644348145 }
37,709
comment implémentez coté React l'api endpoint flask qui me permet d'obtenir le réseaux sociaux des membres groupes, c'est à dire comment faire en sorte d'utiliser les données des réseaux sociaux des groupes si s'en est et des artistes si c'est des artistes ? @app.route('/getSocialLinks/<int:id>') def getRS(id): connexion_bd = ConnexionBD() lienRS = LienRS_BD(connexion_bd) print("test") res = lienRS.get_liensRS_membre_json(id) print(res) if res is None: return jsonify({"error": "Aucun artiste trouve"}) else: return res @app.route('/getMemberSocialLinks/<int:idMG>') def get_member_social_links(idMG): connexion_bd = ConnexionBD() lienRS = LienRS_BD(connexion_bd) res = lienRS.get_liensRS_membre_par_id_json(idMG) if res is None: return jsonify({"error": "Aucun lien de réseau social trouvé pour le membre"}) else: return res import { motion } from 'framer-motion'; import {useState} from 'react' type Props = { href : string; type: "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube" | "inconnu"; } export default function BoutonReseau(props: Props) { const [isHovered, setIsHovered] = useState(false) const btnVariants = { default:{ backgroundColor:"#E45A3B00", transition:{ duration: 0.3, ease:"easeOut", } }, hover:{ backgroundColor:"#E45A3B", transition:{ duration: 0.3, ease:"easeOut", } } } const svgVariants = { default:{ fill:"#E45A3B", transition:{ duration:0.3, ease: [1, 0, 0,1] } }, hover:{ fill:"#FFFBEE", transition:{ duration:0.3, ease: [1, 0, 0,1] } } } return ( <motion.a href={props.href} className='btn-reseau' onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} variants={btnVariants} initial="default" animate={isHovered ? "hover" : "default"} > { props.type === "soundcloud" ? 
( <svg width="51" height="22" viewBox="0 0 51 22" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M24.0742 3.20787V21.779H44.0157C48.2271 21.4821 50.1166 18.878 50.1166 15.68C50.1166 12.2993 47.567 9.581 44.1523 9.581C43.2873 9.581 42.5588 9.76374 41.8076 10.0835C41.2612 4.73833 36.7312 0.558105 31.1539 0.558105C28.4905 0.558105 25.9637 1.56319 24.0742 3.20787ZM22.071 5.53783C21.388 5.12666 20.6596 4.80686 19.8628 4.64696V21.779H23.0271V4.28148C22.6856 4.64696 22.3669 5.10382 22.071 5.53783ZM16.7214 4.30432V21.779H18.8157V4.37285C18.3832 4.30432 17.9506 4.28148 17.4954 4.28148C17.2222 4.28148 16.9718 4.28148 16.7214 4.30432ZM12.5555 5.7891V21.779H14.6271V4.73833C13.8758 4.9896 13.1701 5.35509 12.5555 5.7891ZM8.75387 11.4998C8.61729 11.4998 8.4807 11.3627 8.32135 11.2942V21.779H10.4157V7.75358C9.57339 8.85003 9.00428 10.1521 8.75387 11.4998ZM4.10996 10.8602V21.5735C4.58801 21.7105 5.13435 21.779 5.72622 21.779H6.22704V10.6775C6.04492 10.6546 5.86281 10.6318 5.72622 10.6318C5.13435 10.6318 4.58801 10.7231 4.10996 10.8602ZM0.0351562 16.2054C0.0351562 17.9186 0.809142 19.4262 2.01565 20.4542V11.9795C0.809142 12.9846 0.0351562 14.515 0.0351562 16.2054Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) : props.type === "spotify" ? 
( <svg width="40" height="40" viewBox="0 0 40 40" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M31.8 17.8C25.4 14 14.7 13.6 8.6 15.5C7.6 15.8 6.6 15.2 6.3 14.3C6 13.3 6.6 12.3 7.5 12C14.6 9.9 26.3 10.3 33.7 14.7C34.6 15.2 34.9 16.4 34.4 17.3C33.9 18 32.7 18.3 31.8 17.8ZM31.6 23.4C31.1 24.1 30.2 24.4 29.5 23.9C24.1 20.6 15.9 19.6 9.6 21.6C8.8 21.8 7.9 21.4 7.7 20.6C7.5 19.8 7.9 18.9 8.7 18.7C16 16.5 25 17.6 31.2 21.4C31.8 21.7 32.1 22.7 31.6 23.4ZM29.2 28.9C28.8 29.5 28.1 29.7 27.5 29.3C22.8 26.4 16.9 25.8 9.9 27.4C9.2 27.6 8.6 27.1 8.4 26.5C8.2 25.8 8.7 25.2 9.3 25C16.9 23.3 23.5 24 28.7 27.2C29.4 27.5 29.5 28.3 29.2 28.9ZM20 0C17.3736 0 14.7728 0.517315 12.3463 1.52241C9.91982 2.5275 7.71504 4.00069 5.85786 5.85786C2.10714 9.60859 0 14.6957 0 20C0 25.3043 2.10714 30.3914 5.85786 34.1421C7.71504 35.9993 9.91982 37.4725 12.3463 38.4776C14.7728 39.4827 17.3736 40 20 40C25.3043 40 30.3914 37.8929 34.1421 34.1421C37.8929 30.3914 40 25.3043 40 20C40 17.3736 39.4827 14.7728 38.4776 12.3463C37.4725 9.91982 35.9993 7.71504 34.1421 5.85786C32.285 4.00069 30.0802 2.5275 27.6537 1.52241C25.2272 0.517315 22.6264 0 20 0Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) : props.type === "instagram" ? 
( <svg width="45" height="45" viewBox="0 0 45 45" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M13.0353 0H31.9141C39.106 0 44.9494 5.84342 44.9494 13.0353V31.9141C44.9494 35.3712 43.576 38.6868 41.1314 41.1314C38.6868 43.576 35.3712 44.9494 31.9141 44.9494H13.0353C5.84342 44.9494 0 39.106 0 31.9141V13.0353C0 9.57814 1.37336 6.26255 3.81796 3.81796C6.26255 1.37336 9.57814 0 13.0353 0ZM12.5858 4.49494C10.44 4.49494 8.38204 5.34737 6.8647 6.8647C5.34737 8.38204 4.49494 10.44 4.49494 12.5858V32.3635C4.49494 36.836 8.11336 40.4544 12.5858 40.4544H32.3635C34.5094 40.4544 36.5673 39.602 38.0847 38.0847C39.602 36.5673 40.4544 34.5094 40.4544 32.3635V12.5858C40.4544 8.11336 36.836 4.49494 32.3635 4.49494H12.5858ZM34.2739 7.86614C35.019 7.86614 35.7335 8.16212 36.2604 8.68898C36.7873 9.21583 37.0832 9.93039 37.0832 10.6755C37.0832 11.4206 36.7873 12.1351 36.2604 12.662C35.7335 13.1888 35.019 13.4848 34.2739 13.4848C33.5288 13.4848 32.8142 13.1888 32.2874 12.662C31.7605 12.1351 31.4646 11.4206 31.4646 10.6755C31.4646 9.93039 31.7605 9.21583 32.2874 8.68898C32.8142 8.16212 33.5288 7.86614 34.2739 7.86614ZM22.4747 11.2373C25.455 11.2373 28.3133 12.4213 30.4207 14.5287C32.5281 16.6361 33.712 19.4944 33.712 22.4747C33.712 25.455 32.5281 28.3133 30.4207 30.4207C28.3133 32.5281 25.455 33.712 22.4747 33.712C19.4944 33.712 16.6361 32.5281 14.5287 30.4207C12.4213 28.3133 11.2373 25.455 11.2373 22.4747C11.2373 19.4944 12.4213 16.6361 14.5287 14.5287C16.6361 12.4213 19.4944 11.2373 22.4747 11.2373ZM22.4747 15.7323C20.6865 15.7323 18.9715 16.4426 17.7071 17.7071C16.4426 18.9715 15.7323 20.6865 15.7323 22.4747C15.7323 24.2629 16.4426 25.9778 17.7071 27.2423C18.9715 28.5067 20.6865 29.2171 22.4747 29.2171C24.2629 29.2171 25.9778 28.5067 27.2423 27.2423C28.5067 25.9778 29.2171 24.2629 29.2171 22.4747C29.2171 20.6865 28.5067 18.9715 27.2423 17.7071C25.9778 16.4426 24.2629 15.7323 22.4747 15.7323Z" variants={svgVariants} initial="default" animate={isHovered ? 
"hover" : "default"} /> </svg> ) : props.type === "twitter" ? ( <svg width="49" height="39" viewBox="0 0 49 39" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M49 4.58824C47.1965 5.39118 45.2524 5.91882 43.238 6.17118C45.2992 4.95529 46.892 3.02824 47.6415 0.711176C45.6974 1.85824 43.5425 2.66118 41.2706 3.12C39.4202 1.14706 36.8203 0 33.869 0C28.3647 0 23.8676 4.40471 23.8676 9.84176C23.8676 10.6218 23.9613 11.3788 24.1252 12.09C15.7868 11.6771 8.36185 7.75412 3.41969 1.81235C2.55306 3.25765 2.06119 4.95529 2.06119 6.74471C2.06119 10.1629 3.81788 13.1912 6.53489 14.9118C4.87189 14.9118 3.326 14.4529 1.9675 13.7647V13.8335C1.9675 18.6053 5.43403 22.5971 10.0249 23.4918C8.55094 23.8868 7.00357 23.9418 5.5043 23.6524C6.14047 25.608 7.38639 27.3193 9.06691 28.5455C10.7474 29.7718 12.7781 30.4514 14.8733 30.4888C11.3216 33.2428 6.91902 34.7314 2.3891 34.71C1.59273 34.71 0.796367 34.6641 0 34.5724C4.45029 37.3712 9.74379 39 15.412 39C33.869 39 44.011 23.9965 44.011 10.9888C44.011 10.5529 44.011 10.14 43.9876 9.70412C45.9551 8.32765 47.6415 6.58412 49 4.58824Z" variants={svgVariants} initial="default" animate={isHovered ? 
"hover" : "default"} /> </svg> ) : ( <svg width="53" height="37" viewBox="0 0 53 37" fill="none" xmlns="http://www.w3.org/2000/svg"> <motion.path d="M52.9878 13.04C53.1069 9.6163 52.3532 6.21826 50.7969 3.16135C49.741 1.90703 48.2755 1.06056 46.6559 0.769439C39.9569 0.165541 33.2301 -0.0819771 26.5045 0.0279454C19.8034 -0.0869662 13.1009 0.152565 6.42537 0.74552C5.10558 0.984034 3.88422 1.59906 2.91031 2.51554C0.743496 4.50083 0.502739 7.89735 0.261982 10.7676C-0.0873274 15.9284 -0.0873274 21.1065 0.261982 26.2673C0.331634 27.8828 0.573748 29.4864 0.984253 31.0511C1.27455 32.2592 1.86187 33.3769 2.69363 34.3041C3.67416 35.2691 4.92397 35.9191 6.28091 36.1698C11.4714 36.8063 16.7014 37.0701 21.9301 36.9591C30.3566 37.0787 37.7479 36.9591 46.4873 36.2894C47.8776 36.0541 49.1626 35.4033 50.1709 34.4237C50.845 33.7537 51.3484 32.9338 51.6396 32.0318C52.5006 29.4069 52.9235 26.6598 52.8915 23.8993C52.9878 22.5598 52.9878 14.4751 52.9878 13.04ZM21.0634 25.3344V10.5285L35.3162 17.9673C31.3197 20.1679 26.0471 22.6555 21.0634 25.3344Z" variants={svgVariants} initial="default" animate={isHovered ? "hover" : "default"} /> </svg> ) } </motion.a> ) } import { useEffect, useState, useRef } from "react"; import { Link, useLocation } from "react-router-dom"; import BoutonReseau from "../../components/Artiste/BoutonReseau"; import { motion } from "framer-motion"; import axios from "axios"; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || "Description par défaut si aucune description n'est passée." 
); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get("id"); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<"top" | "bottom">( "top" ); function getSocialNetworkType( url: string ): | "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube" | "inconnu" { if (url.includes("soundcloud")) { return "soundcloud"; } else if (url.includes("spotify")) { return "spotify"; } else if (url.includes("instagram")) { return "instagram"; } else if (url.includes("twitter")) { return "twitter"; } else if (url.includes("youtube")) { return "youtube"; } else { return "inconnu"; } } useEffect(() => { axios .get("http://localhost:8080/getSocialLinks/" + idArtiste) .then((response) => { setSocialLinks(response.data); console.log(response.data); }) .catch((error) => { console.error( "Erreur lors de la récupération des liens de réseaux sociaux", error ); }); const handleResize = () => { setWindowWidth(window.innerWidth); }; window.addEventListener("resize", handleResize); return () => { window.removeEventListener("resize", handleResize); }; }, []); useEffect(() => { console.log('oldXEvenement:', oldX); console.log('oldYEvenement:', oldY); if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 2*3rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition("top"); } else { setInfosGridPosition("bottom"); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: "easeInOut", }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: "easeInOut", }, }, }; const overlayVariants = { initial: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)", transition: { duration: 0.1, ease: "easeInOut", }, }, extended: { 
background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 40%, rgba(0, 0, 0, 0) 100%)", transition: { delay: 0.6, duration: 0.6, ease: "easeInOut", }, }, }; return ( <div id="PageArtiste"> <motion.div className="overlay" variants={overlayVariants} initial="initial" animate={infosGridPosition === "top" ? "extended" : "initial"} /> <img src={"http://localhost:8080/getImageArtiste/" + idArtiste} alt="image de fond" /> <div className="content" style={{ columnGap: infosGridPosition === "top" ? "0" : "5rem", rowGap: windowWidth > 991 ? "0" : "5rem", }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" ") .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className="infos" variants={infosVariants} initial="initial" animate="visible" exit="initial" style={{ gridArea: infosGridPosition }} > <p className="description">{description}</p> <div className="les-reseaux"> {socialLinks.map((link, index) => { const type = getSocialNetworkType(link.reseau); if (type) { return ( <BoutonReseau key={index} href={link.reseau} type={type} /> ); } return null; })} </div> <Link to="/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className="btn-retour" > <svg width="36" height="28" viewBox="0 0 36 28" fill="none" xmlns="http://www.w3.org/2000/svg" > <rect x="6.52539" y="0.321533" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(45 6.52539 0.321533)" /> <rect x="3.87891" y="25.5957" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(-45 3.87891 25.5957)" /> </svg> </Link> </motion.div> <div className="date-heure"> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); } import { useEffect, useState, useRef, useLayoutEffect } from "react"; import SearchBar from "../../components/form/SearchBar"; import Combo from "../../components/form/Combo"; import CarteArtiste from "../../components/Artiste/CarteProgrammation"; import { motion } from 
"framer-motion"; import { useLocation } from "react-router-dom"; import axios from "axios"; import Footer from "../../components/footer"; import CarteProgrammation from "../../components/Artiste/CarteProgrammation"; import { AnimatePresence } from "framer-motion"; type Props = { isNavInFocus: boolean; setIsNavTransparent: (isNavTransparent: boolean) => void; }; type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; }; type Artiste = { descriptionA?: string; idMG?: number; idG: number; nomDeSceneMG: string; nomMG: string; prenomMG?: string; datePassage?: string; heurePassage?: string; }; type Evenement = { dateDebutE: string; dateFinE: string; heureDebutE: string; heureFinE: string; idE: number; idG: number; idL: number | null; nomE: string; }; type Programme = Groupe | Evenement | Artiste; export default function Programmation(props: Props) { const location = useLocation(); const idArtistComingFrom = location.state?.comesFromPageArtist; const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [searchTerm, setSearchTerm] = useState(""); window.history.replaceState({}, document.title); const [lesGroupes, setLesGroupes] = useState<Groupe[]>(location.state ? 
oldGroupes : []); const [lesArtistes, setLesArtistes] = useState<Artiste[]>([]); const groupePassageMap = useRef< Map<number, { datePassage: string; heurePassage: string }> >(new Map()); useEffect(() => { axios.get("http://localhost:8080/getGroupesWithEvenements").then((res) => { const groupedData = res.data as Programme[][]; // console.log("groupedData") // console.log(groupedData); const listeGroupes: Groupe[] = []; const groupSet = new Set<number>(); const listeArtistes: Artiste[] = []; groupedData.forEach((groupArray) => { let groupeObj: Partial<Groupe> = {}; let artisteObj: Partial<Artiste> = {}; groupArray.forEach((item) => { if ("nomG" in item) { groupeObj = { ...groupeObj, ...item }; } else if ("nomDeSceneMG" in item) { artisteObj = { ...artisteObj, ...item }; } else if ("dateDebutE" in item) { const datePassage = item.dateDebutE; const heurePassage = item.heureDebutE; if (groupeObj.idG === item.idG) { groupeObj.datePassage = datePassage; groupeObj.heurePassage = heurePassage; } if (artisteObj.idG === item.idG) { artisteObj.datePassage = datePassage; artisteObj.heurePassage = heurePassage; } } }); // si l'id n'a pas encore était ajoutée à la liste on ajoute le groupe if (groupeObj.idG !== undefined && !groupSet.has(groupeObj.idG)) { listeGroupes.push(groupeObj as Groupe); groupSet.add(groupeObj.idG); } if (artisteObj.nomDeSceneMG !== undefined) { listeArtistes.push(artisteObj as Artiste); } }); setLesGroupes(listeGroupes); // console.log(listeGroupes); groupePassageMap.current.clear(); listeGroupes.forEach((groupe) => { groupePassageMap.current.set(groupe.idG, { datePassage: groupe.datePassage, heurePassage: groupe.heurePassage, }); }); setLesArtistes(listeArtistes); }); }, []); // let filteredGroupes = lesGroupes; // if (searchTerm) { // filteredGroupes = lesGroupes.filter((groupe) => // groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()) // ); // } // let filteredArtistes = lesArtistes; // if (searchTerm) { // filteredArtistes = 
lesArtistes.filter((artiste) => // artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()) // ) // } const [filtreDate, setFiltreDate] = useState("Tout"); console.log(filtreDate) let termeRechercher = 'Tout'; if (filtreDate === '21 Juillet') { termeRechercher = '07-21'; } else if (filtreDate === '22 Juillet') { termeRechercher = '07-22'; } else if (filtreDate === '23 Juillet') { termeRechercher = '07-23'; } let filteredGroupes = lesGroupes; if (searchTerm) { filteredGroupes = filteredGroupes.filter((groupe) => { console.log(groupe.nomG); if (searchTerm.length > 5) { return groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return groupe.nomG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== 'Tout') { console.log(termeRechercher) filteredGroupes = filteredGroupes.filter((groupe) => { return groupe.datePassage.includes(termeRechercher); }); } let filteredArtistes = lesArtistes; console.log("Les Artistes avant filtre:", lesArtistes); if (searchTerm) { console.log("Filtered Artistes après searchTerm:", filteredArtistes); filteredArtistes = filteredArtistes.filter((artiste) => { if (searchTerm.length > 5) { return artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return artiste.nomDeSceneMG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== 'Tout') { filteredArtistes = filteredArtistes.filter((artiste) => { console.log("Filtered Artistes après searchTerm:", filteredArtistes); const groupeInfo = groupePassageMap.current.get(artiste.idG); return groupeInfo?.datePassage.includes(termeRechercher); }); } console.log(filteredGroupes); console.log(filteredArtistes); const [filtreAffichage, setFiltreAffichage] = useState("Grille"); const [filtreGenre, setFiltreGenre] = useState("Tout"); const pageRef = useRef<HTMLDivElement>(null); const contentVariants = { visible: { filter: "blur(0px)", scale: 1, zIndex: 1, transition: { duration: 0.5, ease: [1, 0, 0, 1], 
}, }, hidden: { filter: "blur(10px)", scale: 0.8, zIndex: -1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, }; useEffect(() => { window.scrollTo(0, 0); props.setIsNavTransparent(false); }, []); return ( <> <motion.div id="Programmation" className="page-defaut" variants={contentVariants} animate={props.isNavInFocus ? "hidden" : "visible"} ref={pageRef} > <header> <div className="title"> <h2>PROGRAMMATION</h2> <svg width="64" height="64" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg" > <path d="M62.9991 27.739L42.1815 27.7675L56.8787 13.0286L50.7001 6.86056L36.0029 21.5994L35.9744 0.785744L27.2406 0.797718L27.2692 21.6114L12.5316 6.91288L6.36413 13.0979L21.1017 27.7964L0.289932 27.825L0.301899 36.5537L21.1137 36.5251L6.41646 51.2641L12.6009 57.4321L27.2981 42.6932L27.3266 63.5069L36.0603 63.4949L36.0318 42.6812L50.7694 57.3798L56.931 51.1948L42.1934 36.4962L63.011 36.4677L62.9991 27.739Z" fill="#FFD600" /> </svg> </div> <div className="filters-container"> <div className="filters"> <Combo title="DATE" choices={["Tout", "21 Juillet", "22 Juillet", "23 Juillet"]} currentChoice={filtreDate} setCurrentChoice={setFiltreDate} /> <Combo title="AFFICHAGE" choices={["Grille", "Horaires"]} currentChoice={filtreAffichage} setCurrentChoice={setFiltreAffichage} /> <Combo title="GENRE" choices={["Tout", "Rap", "Rock", "Pop"]} currentChoice={filtreGenre} setCurrentChoice={setFiltreGenre} /> </div> <SearchBar text="Rechercher un artiste" onSearch={setSearchTerm} /> </div> </header> <main className="liste-artistes"> <AnimatePresence> {filteredGroupes.map((groupe) => { console.log('idArtistComingFrom for groupe:', idArtistComingFrom); console.log("groupe.idG : "); console.log(groupe.idG); return ( <CarteProgrammation key={groupe.idG} id={groupe.idG} nomArtiste={groupe.nomG} description={groupe.descriptionG} date={groupe.datePassage} heure={groupe.heurePassage} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom 
== groupe.idG ? oldX : null} oldY={idArtistComingFrom == groupe.idG ? oldY : null} comesFromPageArtist={idArtistComingFrom == groupe.idG} /> ); })} {filteredArtistes.map((artiste) => { console.log('idArtistComingFrom for artiste:', idArtistComingFrom); const groupeInfo = groupePassageMap.current.get(artiste.idG); console.log("artiste.idMG : "); console.log(artiste.idMG); return ( <CarteProgrammation key={artiste.idMG} id={artiste.idMG} nomArtiste={artiste.nomDeSceneMG} description={artiste.descriptionA} date={groupeInfo?.datePassage ?? "Date inconnue"} heure={groupeInfo?.heurePassage ?? "Heure inconnue"} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == artiste.idMG ? oldX : null} oldY={idArtistComingFrom == artiste.idMG ? oldY : null} comesFromPageArtist={idArtistComingFrom == artiste.idMG} /> ); })} </AnimatePresence> </main> </motion.div> <Footer /> </> ); } import { TargetAndTransition, motion } from "framer-motion"; import { useContext, useState } from "react"; import Button from "../components/form/Button"; import { getCookie, setCookie } from "../cookies/CookiesLib.tsx"; import { CartContext } from "../App"; const initialDays = { "20 Jui": false, "21 Jui": false, "22 Jui": false, }; type Props = { id: number; title: string; price: number | string; nbTicket: number; isForfait?: boolean; disabled?: boolean; }; export default function TicketCard({ id, title, price, nbTicket, isForfait, }: Props) { const [isOpen, setIsOpen] = useState(false); const [tickets, setTickets] = useState(nbTicket); const [rotation, setRotation] = useState(0); const [days, setDays] = useState(initialDays); const { cart, setCart } = useContext(CartContext); const [isLoading, setIsLoading] = useState(false); const [isAdded, setIsAdded] = useState(false); type DayKey = "20 Jui" |"21 Jui"|"22 Jui"; const pricesByDay:{[key in DayKey]: number} = { "20 Jui": 60, "21 Jui": 80, "22 Jui": 90, }; const selectedDayCount = () => { return 
Object.values(days).filter(Boolean).length; } const isDisabled = tickets === 0 || (isForfait && selectedDayCount() !== 2); const handleTicketChange = (newTickets: number, event: React.MouseEvent) => { event.stopPropagation(); setTickets(newTickets); } const addToCartHandler = async ( event: React.MouseEvent<HTMLButtonElement> ) => { event.stopPropagation(); setIsLoading(true); // Longueur actuelle du panier dans les cookies const selectedDaysSortedString = isForfait ? Object.entries(days) .filter(([_, isChosen]) => isChosen) .map(([day, _]) => day) .sort() .join('-') : null; const calculPrixSelonJourSelec = isForfait && selectedDaysSortedString ? selectedDaysSortedString.split('-').reduce((total, dayKey) => { return total + pricesByDay[dayKey as DayKey]; }, 0) : (typeof price === "number" ? price : 0); const uniqueId = isForfait && selectedDaysSortedString ? `${id}-${selectedDaysSortedString}` : `${id}`; const itemForCart = { id, uniqueId, title, price: isForfait ? calculPrixSelonJourSelec : (typeof price === "number" ? price : 0), quantity: tickets, selectedDaysSortedString: isForfait ? 
selectedDaysSortedString : null, }; // const currentCartLength = (getCookie("cart") || []).length; // condition non valide car si on ajoute un typê billet déjà présent dans le panier, // l'ajout des nouveaux billets ne ferons que changer la valeur de quantité dans le tableau et // donc la longueur du tableau ne changera pas // Initialise la vérification de l'ajout let expectedNewQuantity = itemForCart.quantity; let newCart = [...cart]; const forfaitIndex = newCart.findIndex((billet) => billet.uniqueId === uniqueId); // si l'article existait déjà dans le panie on garde la quantité précédente et on détermine la nouvelle quantité attendue if (forfaitIndex > -1) { // si ce type de billet existe déjà, ajoute à la quantité et calcule la nouvelle quantité attendue expectedNewQuantity += newCart[forfaitIndex].quantity; newCart[forfaitIndex].quantity = expectedNewQuantity; } else { // sinon on l'ajoute au panier newCart.push(itemForCart); } setCart(newCart); setCookie("cart", newCart, { expires: 7, sameSite: "None", secure: true }); setTimeout(() => { // trouver l'article après la mise à jour pour vérifier que la quantité est correcte const newForfaitIndex = newCart.findIndex((billet) => billet.uniqueId === uniqueId); const hasCorrectQuantity = newForfaitIndex > -1 ? 
newCart[newForfaitIndex].quantity === expectedNewQuantity : false; if (hasCorrectQuantity) { setIsAdded(true); } else { console.error('Erreur : la quantité dans le panier ne correspond pas à la quantité attendue.'); } setIsLoading(false); // Afficher le message 'ARTICLE AJOUTÉ' pendant un certain temps setTimeout(() => { setIsAdded(false); }, 2000); }, 400); }; const displayDay = (day: string) => day.replace("Juillet", "Juillet"); const buttonVariants: { [key: string]: TargetAndTransition } = { tap: { scale: 0.95 }, selected: { scale: 1.1, backgroundColor: "#E45A3B" }, unselected: { scale: 1, backgroundColor: "#FFFFFF" }, }; const contentVariants = { closed: { opacity: 0, height: 0, overflow: "hidden", transition: { duration: 0.2, ease: "easeInOut", when: "afterChildren", }, }, open: { opacity: 1, height: title === "Forfait 2 jours" ? 160 : 150, transition: { duration: 0.2, ease: "easeInOut", when: "beforeChildren", }, }, }; const cardVariants = { hidden: { opacity: 0, y: 50 }, visible: { opacity: 1, y: 0, transition: { type: "spring", stiffness: 120, }, }, }; const calculateSubTotal = () => { if (isForfait) { // Calculer le prix total en fonction des jours sélectionnés const totalSelectedDaysPrice = Object.entries(days) .filter(([_, isSelected]) => isSelected) .reduce((total, [day]) => { return total + pricesByDay[day as keyof typeof pricesByDay]; }, 0); return tickets * totalSelectedDaysPrice; } else { return tickets * (typeof price === 'number' ? price : parseInt(price, 10)); } }; const maxSelectedDays = 2; const toggleDaySelection = (day: keyof typeof initialDays) => { setDays((prevDays) => { const isSelected = prevDays[day]; const count = selectedDayCount(); if (!isSelected && count >= maxSelectedDays) { return prevDays; } return { ...prevDays, [day]: !prevDays[day] }; }); }; return ( <motion.div className="ticket-card" layout initial="hidden" animate="visible" variants={cardVariants} onClick={() => { setIsOpen(!isOpen); setRotation(rotation === 0 ? 
90 : 0); }} > <div className="content"> <div className="left-part"> <h4>{title}</h4> <p>Les tickets ne sont pas remboursables.</p> <p>Dernière entrée à 11H.</p> </div> <div className="right-part"> <p>{price}€</p> <motion.div className="svg-container" animate={{ rotate: rotation }}> <svg xmlns="http://www.w3.org/2000/svg" width="13" height="20" viewBox="0 0 13 20" fill="none" > <path d="M2 18L10 10L2 2" stroke="#4E4E4E" strokeWidth="4" /> </svg> </motion.div> </div> </div> <motion.div className={`sub-menu ${ title === "Forfait 2 jours" ? "forfait-2j" : "" }`} variants={contentVariants} initial="closed" animate={isOpen ? "open" : "closed"} exit="closed" > <div className="top-partsubmenu"> <div className="left-part-sub"> <div className="sub-menu-left-part"> <div className="rect"> <img className="" src="images/billet_pass1j.png" alt="Billet pass 1 jour" /> </div> <div className="container"></div> <div className="article-select"> <svg xmlns="http://www.w3.org/2000/svg" width="22" height="21" viewBox="0 0 22 21" fill="none" > <path d="M22 9.03848H14.6966L19.8599 4.10947L17.6953 2.04109L12.532 6.97007V0H9.46799V6.97007L4.30475 2.04109L2.13807 4.10947L7.30131 9.03848H0V11.9615H7.30131L2.13807 16.8906L4.30475 18.9589L9.46799 14.0299V21H12.532V14.0299L17.6953 18.9589L19.8599 16.8906L14.6966 11.9615H22V9.03848Z" fill="#FFD600" /> </svg> <p>x{tickets} Article(s) sélectionné(s)</p> </div> </div> <div className="ticket-control"> <button className="minusButton" onClick={(event) => handleTicketChange(Math.max(tickets - 1, 0), event) } > - </button> <span>{tickets}</span> <button className="sommeButton" onClick={(event) => handleTicketChange(tickets + 1, event)} > + </button> </div> </div> </div> <div className="delimiter-submenu"></div> <div className="bottom-partsubmenu"> <div className="bottom-part-left"> <div className="day-checkbox-container"> {isForfait && (Object.keys(days) as Array<keyof typeof initialDays>).map( (day) => ( <motion.label key={day} className="day-checkbox" 
whileTap="tap" > <input type="checkbox" checked={days[day]} onChange={() => toggleDaySelection(day)} style={{ display: "none" }} /> <motion.div className="day-button" variants={buttonVariants} animate={days[day] ? "selected" : "unselected"} > {displayDay(day)} </motion.div> </motion.label> ) )} </div> <p>Sous-total</p> <p>{calculateSubTotal().toFixed(2)}€</p> </div> <Button text={ isLoading ? "Chargement…" : isAdded ? "ARTICLE AJOUTÉ" : "AJOUT PANIER" } isLoading={isLoading} onClick={(event: React.MouseEvent<HTMLElement>) => addToCartHandler(event as React.MouseEvent<HTMLButtonElement>) } isDisabled={isDisabled} /> </div> </motion.div> </motion.div> ); }
4c4e698b133d110eaa2bc702dbb3522a
{ "intermediate": 0.30737733840942383, "beginner": 0.4489535391330719, "expert": 0.2436690777540207 }
37,710
comment implémentez coté React l’api endpoint flask qui me permet d’obtenir le réseaux sociaux des membres groupes, c’est à dire comment faire en sorte d’utiliser les données des réseaux sociaux des groupes si s’en est et des artistes si c’est des artistes ? @app.route(‘/getSocialLinks/int:id’) def getRS(id): connexion_bd = ConnexionBD() lienRS = LienRS_BD(connexion_bd) print(“test”) res = lienRS.get_liensRS_membre_json(id) print(res) if res is None: return jsonify({“error”: “Aucun artiste trouve”}) else: return res @app.route(‘/getMemberSocialLinks/<int:idMG>’) def get_member_social_links(idMG): connexion_bd = ConnexionBD() lienRS = LienRS_BD(connexion_bd) res = lienRS.get_liensRS_membre_par_id_json(idMG) if res is None: return jsonify({“error”: “Aucun lien de réseau social trouvé pour le membre”}) else: return res import { motion } from ‘framer-motion’; import {useState} from ‘react’ type Props = { href : string; type: “soundcloud” | “spotify” | “instagram” | “twitter” | “youtube” | “inconnu”; } export default function BoutonReseau(props: Props) { const [isHovered, setIsHovered] = useState(false) const btnVariants = { default:{ backgroundColor:“#E45A3B00”, transition:{ duration: 0.3, ease:“easeOut”, } }, hover:{ backgroundColor:“#E45A3B”, transition:{ duration: 0.3, ease:“easeOut”, } } } const svgVariants = { default:{ fill:“#E45A3B”, transition:{ duration:0.3, ease: [1, 0, 0,1] } }, hover:{ fill:“#FFFBEE”, transition:{ duration:0.3, ease: [1, 0, 0,1] } } } return ( <motion.a href={props.href} className=‘btn-reseau’ onMouseEnter={() => setIsHovered(true)} onMouseLeave={() => setIsHovered(false)} variants={btnVariants} initial=“default” animate={isHovered ? “hover” : “default”} > { props.type === “soundcloud” ? 
( <svg width=“51” height=“22” viewBox=“0 0 51 22” fill=“none” xmlns=“http://www.w3.org/2000/svg”> <motion.path d=“M24.0742 3.20787V21.779H44.0157C48.2271 21.4821 50.1166 18.878 50.1166 15.68C50.1166 12.2993 47.567 9.581 44.1523 9.581C43.2873 9.581 42.5588 9.76374 41.8076 10.0835C41.2612 4.73833 36.7312 0.558105 31.1539 0.558105C28.4905 0.558105 25.9637 1.56319 24.0742 3.20787ZM22.071 5.53783C21.388 5.12666 20.6596 4.80686 19.8628 4.64696V21.779H23.0271V4.28148C22.6856 4.64696 22.3669 5.10382 22.071 5.53783ZM16.7214 4.30432V21.779H18.8157V4.37285C18.3832 4.30432 17.9506 4.28148 17.4954 4.28148C17.2222 4.28148 16.9718 4.28148 16.7214 4.30432ZM12.5555 5.7891V21.779H14.6271V4.73833C13.8758 4.9896 13.1701 5.35509 12.5555 5.7891ZM8.75387 11.4998C8.61729 11.4998 8.4807 11.3627 8.32135 11.2942V21.779H10.4157V7.75358C9.57339 8.85003 9.00428 10.1521 8.75387 11.4998ZM4.10996 10.8602V21.5735C4.58801 21.7105 5.13435 21.779 5.72622 21.779H6.22704V10.6775C6.04492 10.6546 5.86281 10.6318 5.72622 10.6318C5.13435 10.6318 4.58801 10.7231 4.10996 10.8602ZM0.0351562 16.2054C0.0351562 17.9186 0.809142 19.4262 2.01565 20.4542V11.9795C0.809142 12.9846 0.0351562 14.515 0.0351562 16.2054Z” variants={svgVariants} initial=“default” animate={isHovered ? “hover” : “default”} /> </svg> ) : props.type === “spotify” ? 
( <svg width=“40” height=“40” viewBox=“0 0 40 40” fill=“none” xmlns=“http://www.w3.org/2000/svg”> <motion.path d=“M31.8 17.8C25.4 14 14.7 13.6 8.6 15.5C7.6 15.8 6.6 15.2 6.3 14.3C6 13.3 6.6 12.3 7.5 12C14.6 9.9 26.3 10.3 33.7 14.7C34.6 15.2 34.9 16.4 34.4 17.3C33.9 18 32.7 18.3 31.8 17.8ZM31.6 23.4C31.1 24.1 30.2 24.4 29.5 23.9C24.1 20.6 15.9 19.6 9.6 21.6C8.8 21.8 7.9 21.4 7.7 20.6C7.5 19.8 7.9 18.9 8.7 18.7C16 16.5 25 17.6 31.2 21.4C31.8 21.7 32.1 22.7 31.6 23.4ZM29.2 28.9C28.8 29.5 28.1 29.7 27.5 29.3C22.8 26.4 16.9 25.8 9.9 27.4C9.2 27.6 8.6 27.1 8.4 26.5C8.2 25.8 8.7 25.2 9.3 25C16.9 23.3 23.5 24 28.7 27.2C29.4 27.5 29.5 28.3 29.2 28.9ZM20 0C17.3736 0 14.7728 0.517315 12.3463 1.52241C9.91982 2.5275 7.71504 4.00069 5.85786 5.85786C2.10714 9.60859 0 14.6957 0 20C0 25.3043 2.10714 30.3914 5.85786 34.1421C7.71504 35.9993 9.91982 37.4725 12.3463 38.4776C14.7728 39.4827 17.3736 40 20 40C25.3043 40 30.3914 37.8929 34.1421 34.1421C37.8929 30.3914 40 25.3043 40 20C40 17.3736 39.4827 14.7728 38.4776 12.3463C37.4725 9.91982 35.9993 7.71504 34.1421 5.85786C32.285 4.00069 30.0802 2.5275 27.6537 1.52241C25.2272 0.517315 22.6264 0 20 0Z” variants={svgVariants} initial=“default” animate={isHovered ? “hover” : “default”} /> </svg> ) : props.type === “instagram” ? 
( <svg width=“45” height=“45” viewBox=“0 0 45 45” fill=“none” xmlns=“http://www.w3.org/2000/svg”> <motion.path d=“M13.0353 0H31.9141C39.106 0 44.9494 5.84342 44.9494 13.0353V31.9141C44.9494 35.3712 43.576 38.6868 41.1314 41.1314C38.6868 43.576 35.3712 44.9494 31.9141 44.9494H13.0353C5.84342 44.9494 0 39.106 0 31.9141V13.0353C0 9.57814 1.37336 6.26255 3.81796 3.81796C6.26255 1.37336 9.57814 0 13.0353 0ZM12.5858 4.49494C10.44 4.49494 8.38204 5.34737 6.8647 6.8647C5.34737 8.38204 4.49494 10.44 4.49494 12.5858V32.3635C4.49494 36.836 8.11336 40.4544 12.5858 40.4544H32.3635C34.5094 40.4544 36.5673 39.602 38.0847 38.0847C39.602 36.5673 40.4544 34.5094 40.4544 32.3635V12.5858C40.4544 8.11336 36.836 4.49494 32.3635 4.49494H12.5858ZM34.2739 7.86614C35.019 7.86614 35.7335 8.16212 36.2604 8.68898C36.7873 9.21583 37.0832 9.93039 37.0832 10.6755C37.0832 11.4206 36.7873 12.1351 36.2604 12.662C35.7335 13.1888 35.019 13.4848 34.2739 13.4848C33.5288 13.4848 32.8142 13.1888 32.2874 12.662C31.7605 12.1351 31.4646 11.4206 31.4646 10.6755C31.4646 9.93039 31.7605 9.21583 32.2874 8.68898C32.8142 8.16212 33.5288 7.86614 34.2739 7.86614ZM22.4747 11.2373C25.455 11.2373 28.3133 12.4213 30.4207 14.5287C32.5281 16.6361 33.712 19.4944 33.712 22.4747C33.712 25.455 32.5281 28.3133 30.4207 30.4207C28.3133 32.5281 25.455 33.712 22.4747 33.712C19.4944 33.712 16.6361 32.5281 14.5287 30.4207C12.4213 28.3133 11.2373 25.455 11.2373 22.4747C11.2373 19.4944 12.4213 16.6361 14.5287 14.5287C16.6361 12.4213 19.4944 11.2373 22.4747 11.2373ZM22.4747 15.7323C20.6865 15.7323 18.9715 16.4426 17.7071 17.7071C16.4426 18.9715 15.7323 20.6865 15.7323 22.4747C15.7323 24.2629 16.4426 25.9778 17.7071 27.2423C18.9715 28.5067 20.6865 29.2171 22.4747 29.2171C24.2629 29.2171 25.9778 28.5067 27.2423 27.2423C28.5067 25.9778 29.2171 24.2629 29.2171 22.4747C29.2171 20.6865 28.5067 18.9715 27.2423 17.7071C25.9778 16.4426 24.2629 15.7323 22.4747 15.7323Z” variants={svgVariants} initial=“default” animate={isHovered ? 
“hover” : “default”} /> </svg> ) : props.type === “twitter” ? ( <svg width=“49” height=“39” viewBox=“0 0 49 39” fill=“none” xmlns=“http://www.w3.org/2000/svg”> <motion.path d=“M49 4.58824C47.1965 5.39118 45.2524 5.91882 43.238 6.17118C45.2992 4.95529 46.892 3.02824 47.6415 0.711176C45.6974 1.85824 43.5425 2.66118 41.2706 3.12C39.4202 1.14706 36.8203 0 33.869 0C28.3647 0 23.8676 4.40471 23.8676 9.84176C23.8676 10.6218 23.9613 11.3788 24.1252 12.09C15.7868 11.6771 8.36185 7.75412 3.41969 1.81235C2.55306 3.25765 2.06119 4.95529 2.06119 6.74471C2.06119 10.1629 3.81788 13.1912 6.53489 14.9118C4.87189 14.9118 3.326 14.4529 1.9675 13.7647V13.8335C1.9675 18.6053 5.43403 22.5971 10.0249 23.4918C8.55094 23.8868 7.00357 23.9418 5.5043 23.6524C6.14047 25.608 7.38639 27.3193 9.06691 28.5455C10.7474 29.7718 12.7781 30.4514 14.8733 30.4888C11.3216 33.2428 6.91902 34.7314 2.3891 34.71C1.59273 34.71 0.796367 34.6641 0 34.5724C4.45029 37.3712 9.74379 39 15.412 39C33.869 39 44.011 23.9965 44.011 10.9888C44.011 10.5529 44.011 10.14 43.9876 9.70412C45.9551 8.32765 47.6415 6.58412 49 4.58824Z” variants={svgVariants} initial=“default” animate={isHovered ? 
“hover” : “default”} /> </svg> ) : ( <svg width=“53” height=“37” viewBox=“0 0 53 37” fill=“none” xmlns=“http://www.w3.org/2000/svg”> <motion.path d=“M52.9878 13.04C53.1069 9.6163 52.3532 6.21826 50.7969 3.16135C49.741 1.90703 48.2755 1.06056 46.6559 0.769439C39.9569 0.165541 33.2301 -0.0819771 26.5045 0.0279454C19.8034 -0.0869662 13.1009 0.152565 6.42537 0.74552C5.10558 0.984034 3.88422 1.59906 2.91031 2.51554C0.743496 4.50083 0.502739 7.89735 0.261982 10.7676C-0.0873274 15.9284 -0.0873274 21.1065 0.261982 26.2673C0.331634 27.8828 0.573748 29.4864 0.984253 31.0511C1.27455 32.2592 1.86187 33.3769 2.69363 34.3041C3.67416 35.2691 4.92397 35.9191 6.28091 36.1698C11.4714 36.8063 16.7014 37.0701 21.9301 36.9591C30.3566 37.0787 37.7479 36.9591 46.4873 36.2894C47.8776 36.0541 49.1626 35.4033 50.1709 34.4237C50.845 33.7537 51.3484 32.9338 51.6396 32.0318C52.5006 29.4069 52.9235 26.6598 52.8915 23.8993C52.9878 22.5598 52.9878 14.4751 52.9878 13.04ZM21.0634 25.3344V10.5285L35.3162 17.9673C31.3197 20.1679 26.0471 22.6555 21.0634 25.3344Z” variants={svgVariants} initial=“default” animate={isHovered ? 
“hover” : “default”} /> </svg> ) } </motion.a> ) } import { useEffect, useState, useRef } from “react”; import { Link, useLocation } from “react-router-dom”; import BoutonReseau from “…/…/components/Artiste/BoutonReseau”; import { motion } from “framer-motion”; import axios from “axios”; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || “Description par défaut si aucune description n’est passée.” ); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get(“id”); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<“top” | “bottom”>( “top” ); function getSocialNetworkType( url: string ): | “soundcloud” | “spotify” | “instagram” | “twitter” | “youtube” | “inconnu” { if (url.includes(“soundcloud”)) { return “soundcloud”; } else if (url.includes(“spotify”)) { return “spotify”; } else if (url.includes(“instagram”)) { return “instagram”; } else if (url.includes(“twitter”)) { return “twitter”; } else if (url.includes(“youtube”)) { return “youtube”; } else { return “inconnu”; } } useEffect(() => { axios .get(“http://localhost:8080/getSocialLinks/” + idArtiste) .then((response) => { setSocialLinks(response.data); console.log(response.data); }) .catch((error) => { console.error( “Erreur lors de la récupération des liens de réseaux sociaux”, error ); }); const handleResize = () => { setWindowWidth(window.innerWidth); }; 
window.addEventListener(“resize”, handleResize); return () => { window.removeEventListener(“resize”, handleResize); }; }, []); useEffect(() => { console.log(‘oldXEvenement:’, oldX); console.log(‘oldYEvenement:’, oldY); if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 2*3rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition(“top”); } else { setInfosGridPosition(“bottom”); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: “easeInOut”, }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: “easeInOut”, }, }, }; const overlayVariants = { initial: { background: “linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)”, transition: { duration: 0.1, ease: “easeInOut”, }, }, extended: { background: “linear-gradient(to top, rgba(0, 0, 0, 0.7) 40%, rgba(0, 0, 0, 0) 100%)”, transition: { delay: 0.6, duration: 0.6, ease: “easeInOut”, }, }, }; return ( <div id=“PageArtiste”> <motion.div className=“overlay” variants={overlayVariants} initial=“initial” animate={infosGridPosition === “top” ? “extended” : “initial”} /> <img src={“http://localhost:8080/getImageArtiste/” + idArtiste} alt=“image de fond” /> <div className=“content” style={{ columnGap: infosGridPosition === “top” ? “0” : “5rem”, rowGap: windowWidth > 991 ? 
“0” : “5rem”, }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" “) .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className=“infos” variants={infosVariants} initial=“initial” animate=“visible” exit=“initial” style={{ gridArea: infosGridPosition }} > <p className=“description”>{description}</p> <div className=“les-reseaux”> {socialLinks.map((link, index) => { const type = getSocialNetworkType(link.reseau); if (type) { return ( <BoutonReseau key={index} href={link.reseau} type={type} /> ); } return null; })} </div> <Link to=”/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className=“btn-retour” > <svg width=“36” height=“28” viewBox=“0 0 36 28” fill=“none” xmlns=“http://www.w3.org/2000/svg” > <rect x=“6.52539” y=“0.321533” width=“35.8974” height=“3.58974” rx=“1.79487” transform=“rotate(45 6.52539 0.321533)” /> <rect x=“3.87891” y=“25.5957” width=“35.8974” height=“3.58974” rx=“1.79487” transform=“rotate(-45 3.87891 25.5957)” /> </svg> </Link> </motion.div> <div className=“date-heure”> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); } import { useEffect, useState, useRef, useLayoutEffect } from “react”; import SearchBar from “…/…/components/form/SearchBar”; import Combo from “…/…/components/form/Combo”; import CarteArtiste from “…/…/components/Artiste/CarteProgrammation”; import { motion } from “framer-motion”; import { useLocation } from “react-router-dom”; import axios from “axios”; import Footer from “…/…/components/footer”; import CarteProgrammation from “…/…/components/Artiste/CarteProgrammation”; import { AnimatePresence } from “framer-motion”; type Props = { isNavInFocus: boolean; setIsNavTransparent: (isNavTransparent: boolean) => void; }; type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; }; type Artiste = { descriptionA?: string; idMG?: number; idG: number; 
nomDeSceneMG: string; nomMG: string; prenomMG?: string; datePassage?: string; heurePassage?: string; }; type Evenement = { dateDebutE: string; dateFinE: string; heureDebutE: string; heureFinE: string; idE: number; idG: number; idL: number | null; nomE: string; }; type Programme = Groupe | Evenement | Artiste; export default function Programmation(props: Props) { const location = useLocation(); const idArtistComingFrom = location.state?.comesFromPageArtist; const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [searchTerm, setSearchTerm] = useState(“”); window.history.replaceState({}, document.title); const [lesGroupes, setLesGroupes] = useState<Groupe[]>(location.state ? oldGroupes : []); const [lesArtistes, setLesArtistes] = useState<Artiste[]>([]); const groupePassageMap = useRef< Map<number, { datePassage: string; heurePassage: string }> >(new Map()); useEffect(() => { axios.get(“http://localhost:8080/getGroupesWithEvenements”).then((res) => { const groupedData = res.data as Programme[][]; // console.log(“groupedData”) // console.log(groupedData); const listeGroupes: Groupe[] = []; const groupSet = new Set<number>(); const listeArtistes: Artiste[] = []; groupedData.forEach((groupArray) => { let groupeObj: Partial<Groupe> = {}; let artisteObj: Partial<Artiste> = {}; groupArray.forEach((item) => { if (“nomG” in item) { groupeObj = { …groupeObj, …item }; } else if (“nomDeSceneMG” in item) { artisteObj = { …artisteObj, …item }; } else if (“dateDebutE” in item) { const datePassage = item.dateDebutE; const heurePassage = item.heureDebutE; if (groupeObj.idG === item.idG) { groupeObj.datePassage = datePassage; groupeObj.heurePassage = heurePassage; } if (artisteObj.idG === item.idG) { artisteObj.datePassage = datePassage; artisteObj.heurePassage = heurePassage; } } }); // si l’id n’a pas encore était ajoutée à la liste on ajoute le groupe if (groupeObj.idG !== undefined && !groupSet.has(groupeObj.idG)) 
{ listeGroupes.push(groupeObj as Groupe); groupSet.add(groupeObj.idG); } if (artisteObj.nomDeSceneMG !== undefined) { listeArtistes.push(artisteObj as Artiste); } }); setLesGroupes(listeGroupes); // console.log(listeGroupes); groupePassageMap.current.clear(); listeGroupes.forEach((groupe) => { groupePassageMap.current.set(groupe.idG, { datePassage: groupe.datePassage, heurePassage: groupe.heurePassage, }); }); setLesArtistes(listeArtistes); }); }, []); // let filteredGroupes = lesGroupes; // if (searchTerm) { // filteredGroupes = lesGroupes.filter((groupe) => // groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()) // ); // } // let filteredArtistes = lesArtistes; // if (searchTerm) { // filteredArtistes = lesArtistes.filter((artiste) => // artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()) // ) // } const [filtreDate, setFiltreDate] = useState(“Tout”); console.log(filtreDate) let termeRechercher = ‘Tout’; if (filtreDate === ‘21 Juillet’) { termeRechercher = ‘07-21’; } else if (filtreDate === ‘22 Juillet’) { termeRechercher = ‘07-22’; } else if (filtreDate === ‘23 Juillet’) { termeRechercher = ‘07-23’; } let filteredGroupes = lesGroupes; if (searchTerm) { filteredGroupes = filteredGroupes.filter((groupe) => { console.log(groupe.nomG); if (searchTerm.length > 5) { return groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return groupe.nomG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== ‘Tout’) { console.log(termeRechercher) filteredGroupes = filteredGroupes.filter((groupe) => { return groupe.datePassage.includes(termeRechercher); }); } let filteredArtistes = lesArtistes; console.log(“Les Artistes avant filtre:”, lesArtistes); if (searchTerm) { console.log(“Filtered Artistes après searchTerm:”, filteredArtistes); filteredArtistes = filteredArtistes.filter((artiste) => { if (searchTerm.length > 5) { return artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return 
artiste.nomDeSceneMG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== ‘Tout’) { filteredArtistes = filteredArtistes.filter((artiste) => { console.log(“Filtered Artistes après searchTerm:”, filteredArtistes); const groupeInfo = groupePassageMap.current.get(artiste.idG); return groupeInfo?.datePassage.includes(termeRechercher); }); } console.log(filteredGroupes); console.log(filteredArtistes); const [filtreAffichage, setFiltreAffichage] = useState(“Grille”); const [filtreGenre, setFiltreGenre] = useState(“Tout”); const pageRef = useRef<HTMLDivElement>(null); const contentVariants = { visible: { filter: “blur(0px)”, scale: 1, zIndex: 1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, hidden: { filter: “blur(10px)”, scale: 0.8, zIndex: -1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, }; useEffect(() => { window.scrollTo(0, 0); props.setIsNavTransparent(false); }, []); return ( <> <motion.div id=“Programmation” className=“page-defaut” variants={contentVariants} animate={props.isNavInFocus ? 
“hidden” : “visible”} ref={pageRef} > <header> <div className=“title”> <h2>PROGRAMMATION</h2> <svg width=“64” height=“64” viewBox=“0 0 64 64” fill=“none” xmlns=“http://www.w3.org/2000/svg” > <path d=“M62.9991 27.739L42.1815 27.7675L56.8787 13.0286L50.7001 6.86056L36.0029 21.5994L35.9744 0.785744L27.2406 0.797718L27.2692 21.6114L12.5316 6.91288L6.36413 13.0979L21.1017 27.7964L0.289932 27.825L0.301899 36.5537L21.1137 36.5251L6.41646 51.2641L12.6009 57.4321L27.2981 42.6932L27.3266 63.5069L36.0603 63.4949L36.0318 42.6812L50.7694 57.3798L56.931 51.1948L42.1934 36.4962L63.011 36.4677L62.9991 27.739Z” fill=“#FFD600” /> </svg> </div> <div className=“filters-container”> <div className=“filters”> <Combo title=“DATE” choices={[“Tout”, “21 Juillet”, “22 Juillet”, “23 Juillet”]} currentChoice={filtreDate} setCurrentChoice={setFiltreDate} /> <Combo title=“AFFICHAGE” choices={[“Grille”, “Horaires”]} currentChoice={filtreAffichage} setCurrentChoice={setFiltreAffichage} /> <Combo title=“GENRE” choices={[“Tout”, “Rap”, “Rock”, “Pop”]} currentChoice={filtreGenre} setCurrentChoice={setFiltreGenre} /> </div> <SearchBar text=“Rechercher un artiste” onSearch={setSearchTerm} /> </div> </header> <main className=“liste-artistes”> <AnimatePresence> {filteredGroupes.map((groupe) => { console.log(‘idArtistComingFrom for groupe:’, idArtistComingFrom); console.log(“groupe.idG : “); console.log(groupe.idG); return ( <CarteProgrammation key={groupe.idG} id={groupe.idG} nomArtiste={groupe.nomG} description={groupe.descriptionG} date={groupe.datePassage} heure={groupe.heurePassage} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == groupe.idG ? oldX : null} oldY={idArtistComingFrom == groupe.idG ? 
oldY : null} comesFromPageArtist={idArtistComingFrom == groupe.idG} /> ); })} {filteredArtistes.map((artiste) => { console.log(‘idArtistComingFrom for artiste:’, idArtistComingFrom); const groupeInfo = groupePassageMap.current.get(artiste.idG); console.log(“artiste.idMG : “); console.log(artiste.idMG); return ( <CarteProgrammation key={artiste.idMG} id={artiste.idMG} nomArtiste={artiste.nomDeSceneMG} description={artiste.descriptionA} date={groupeInfo?.datePassage ?? “Date inconnue”} heure={groupeInfo?.heurePassage ?? “Heure inconnue”} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == artiste.idMG ? oldX : null} oldY={idArtistComingFrom == artiste.idMG ? oldY : null} comesFromPageArtist={idArtistComingFrom == artiste.idMG} /> ); })} </AnimatePresence> </main> </motion.div> <Footer /> </> ); } import { TargetAndTransition, motion } from “framer-motion”; import { useContext, useState } from “react”; import Button from “…/components/form/Button”; import { getCookie, setCookie } from “…/cookies/CookiesLib.tsx”; import { CartContext } from “…/App”; const initialDays = { “20 Jui”: false, “21 Jui”: false, “22 Jui”: false, }; type Props = { id: number; title: string; price: number | string; nbTicket: number; isForfait?: boolean; disabled?: boolean; }; export default function TicketCard({ id, title, price, nbTicket, isForfait, }: Props) { const [isOpen, setIsOpen] = useState(false); const [tickets, setTickets] = useState(nbTicket); const [rotation, setRotation] = useState(0); const [days, setDays] = useState(initialDays); const { cart, setCart } = useContext(CartContext); const [isLoading, setIsLoading] = useState(false); const [isAdded, setIsAdded] = useState(false); type DayKey = “20 Jui” |“21 Jui”|“22 Jui”; const pricesByDay:{[key in DayKey]: number} = { “20 Jui”: 60, “21 Jui”: 80, “22 Jui”: 90, }; const selectedDayCount = () => { return Object.values(days).filter(Boolean).length; } const isDisabled = tickets === 0 || 
(isForfait && selectedDayCount() !== 2); const handleTicketChange = (newTickets: number, event: React.MouseEvent) => { event.stopPropagation(); setTickets(newTickets); } const addToCartHandler = async ( event: React.MouseEvent<HTMLButtonElement> ) => { event.stopPropagation(); setIsLoading(true); // Longueur actuelle du panier dans les cookies const selectedDaysSortedString = isForfait ? Object.entries(days) .filter(([_, isChosen]) => isChosen) .map(([day, ]) => day) .sort() .join(‘-’) : null; const calculPrixSelonJourSelec = isForfait && selectedDaysSortedString ? selectedDaysSortedString.split(‘-’).reduce((total, dayKey) => { return total + pricesByDay[dayKey as DayKey]; }, 0) : (typeof price === “number” ? price : 0); const uniqueId = isForfait && selectedDaysSortedString ? ${id}-${selectedDaysSortedString} : ${id}; const itemForCart = { id, uniqueId, title, price: isForfait ? calculPrixSelonJourSelec : (typeof price === “number” ? price : 0), quantity: tickets, selectedDaysSortedString: isForfait ? 
selectedDaysSortedString : null, }; // const currentCartLength = (getCookie(“cart”) || []).length; // condition non valide car si on ajoute un typê billet déjà présent dans le panier, // l’ajout des nouveaux billets ne ferons que changer la valeur de quantité dans le tableau et // donc la longueur du tableau ne changera pas // Initialise la vérification de l’ajout let expectedNewQuantity = itemForCart.quantity; let newCart = […cart]; const forfaitIndex = newCart.findIndex((billet) => billet.uniqueId === uniqueId); // si l’article existait déjà dans le panie on garde la quantité précédente et on détermine la nouvelle quantité attendue if (forfaitIndex > -1) { // si ce type de billet existe déjà, ajoute à la quantité et calcule la nouvelle quantité attendue expectedNewQuantity += newCart[forfaitIndex].quantity; newCart[forfaitIndex].quantity = expectedNewQuantity; } else { // sinon on l’ajoute au panier newCart.push(itemForCart); } setCart(newCart); setCookie(“cart”, newCart, { expires: 7, sameSite: “None”, secure: true }); setTimeout(() => { // trouver l’article après la mise à jour pour vérifier que la quantité est correcte const newForfaitIndex = newCart.findIndex((billet) => billet.uniqueId === uniqueId); const hasCorrectQuantity = newForfaitIndex > -1 ? 
newCart[newForfaitIndex].quantity === expectedNewQuantity : false; if (hasCorrectQuantity) { setIsAdded(true); } else { console.error(‘Erreur : la quantité dans le panier ne correspond pas à la quantité attendue.’); } setIsLoading(false); // Afficher le message ‘ARTICLE AJOUTÉ’ pendant un certain temps setTimeout(() => { setIsAdded(false); }, 2000); }, 400); }; const displayDay = (day: string) => day.replace(“Juillet”, “Juillet”); const buttonVariants: { [key: string]: TargetAndTransition } = { tap: { scale: 0.95 }, selected: { scale: 1.1, backgroundColor: “#E45A3B” }, unselected: { scale: 1, backgroundColor: “#FFFFFF” }, }; const contentVariants = { closed: { opacity: 0, height: 0, overflow: “hidden”, transition: { duration: 0.2, ease: “easeInOut”, when: “afterChildren”, }, }, open: { opacity: 1, height: title === “Forfait 2 jours” ? 160 : 150, transition: { duration: 0.2, ease: “easeInOut”, when: “beforeChildren”, }, }, }; const cardVariants = { hidden: { opacity: 0, y: 50 }, visible: { opacity: 1, y: 0, transition: { type: “spring”, stiffness: 120, }, }, }; const calculateSubTotal = () => { if (isForfait) { // Calculer le prix total en fonction des jours sélectionnés const totalSelectedDaysPrice = Object.entries(days) .filter(([, isSelected]) => isSelected) .reduce((total, [day]) => { return total + pricesByDay[day as keyof typeof pricesByDay]; }, 0); return tickets * totalSelectedDaysPrice; } else { return tickets * (typeof price === ‘number’ ? price : parseInt(price, 10)); } }; const maxSelectedDays = 2; const toggleDaySelection = (day: keyof typeof initialDays) => { setDays((prevDays) => { const isSelected = prevDays[day]; const count = selectedDayCount(); if (!isSelected && count >= maxSelectedDays) { return prevDays; } return { …prevDays, [day]: !prevDays[day] }; }); }; return ( <motion.div className=“ticket-card” layout initial=“hidden” animate=“visible” variants={cardVariants} onClick={() => { setIsOpen(!isOpen); setRotation(rotation === 0 ? 
90 : 0); }} > <div className=“content”> <div className=“left-part”> <h4>{title}</h4> <p>Les tickets ne sont pas remboursables.</p> <p>Dernière entrée à 11H.</p> </div> <div className=“right-part”> <p>{price}€</p> <motion.div className=“svg-container” animate={{ rotate: rotation }}> <svg xmlns=“http://www.w3.org/2000/svg” width=“13” height=“20” viewBox=“0 0 13 20” fill=“none” > <path d=“M2 18L10 10L2 2” stroke=”#4E4E4E” strokeWidth=“4” /> </svg> </motion.div> </div> </div> <motion.div className={sub-menu ${<br/> title === "Forfait 2 jours" ? "forfait-2j" : ""<br/> }} variants={contentVariants} initial=“closed” animate={isOpen ? “open” : “closed”} exit=“closed” > <div className=“top-partsubmenu”> <div className=“left-part-sub”> <div className=“sub-menu-left-part”> <div className=“rect”> <img className=”” src=“images/billet_pass1j.png” alt=“Billet pass 1 jour” /> </div> <div className=“container”></div> <div className=“article-select”> <svg xmlns=“http://www.w3.org/2000/svg” width=“22” height=“21” viewBox=“0 0 22 21” fill=“none” > <path d=“M22 9.03848H14.6966L19.8599 4.10947L17.6953 2.04109L12.532 6.97007V0H9.46799V6.97007L4.30475 2.04109L2.13807 4.10947L7.30131 9.03848H0V11.9615H7.30131L2.13807 16.8906L4.30475 18.9589L9.46799 14.0299V21H12.532V14.0299L17.6953 18.9589L19.8599 16.8906L14.6966 11.9615H22V9.03848Z” fill=“#FFD600” /> </svg> <p>x{tickets} Article(s) sélectionné(s)</p> </div> </div> <div className=“ticket-control”> <button className=“minusButton” onClick={(event) => handleTicketChange(Math.max(tickets - 1, 0), event) } > - </button> <span>{tickets}</span> <button className=“sommeButton” onClick={(event) => handleTicketChange(tickets + 1, event)} > + </button> </div> </div> </div> <div className=“delimiter-submenu”></div> <div className=“bottom-partsubmenu”> <div className=“bottom-part-left”> <div className=“day-checkbox-container”> {isForfait && (Object.keys(days) as Array<keyof typeof initialDays>).map( (day) => ( <motion.label key={day} 
className=“day-checkbox” whileTap=“tap” > <input type=“checkbox” checked={days[day]} onChange={() => toggleDaySelection(day)} style={{ display: “none” }} /> <motion.div className=“day-button” variants={buttonVariants} animate={days[day] ? “selected” : “unselected”} > {displayDay(day)} </motion.div> </motion.label> ) )} </div> <p>Sous-total</p> <p>{calculateSubTotal().toFixed(2)}€</p> </div> <Button text={ isLoading ? “Chargement…” : isAdded ? “ARTICLE AJOUTÉ” : “AJOUT PANIER” } isLoading={isLoading} onClick={(event: React.MouseEvent<HTMLElement>) => addToCartHandler(event as React.MouseEvent<HTMLButtonElement>) } isDisabled={isDisabled} /> </div> </motion.div> </motion.div> ); }
bf39605abf2186401890d371a9273cb1
{ "intermediate": 0.41937360167503357, "beginner": 0.4355344772338867, "expert": 0.14509190618991852 }
37,711
Implementing a detailed code example for MEP clash resolution using DDPG
b91fa8711b2daa7f388f023fe9336990
{ "intermediate": 0.3491774797439575, "beginner": 0.122515469789505, "expert": 0.5283070206642151 }
37,712
Implementing a detailed code for MEP clash resolution using DDPG
ff397bf5488557b93708139248131b34
{ "intermediate": 0.2764882743358612, "beginner": 0.11368250101804733, "expert": 0.6098292469978333 }
37,713
give me a python code code named jestur
a516a3c41b1e49399edf678ddc5c6e95
{ "intermediate": 0.2792348861694336, "beginner": 0.28945282101631165, "expert": 0.43131223320961 }
37,714
j'ai 2 routes, une pour avoir les reseaux des membres et une pour avoir les reseaux sociaux des groupes, comment les différenciez ? @app.route('/getSocialLinks/<int:id>') def getRS(id): connexion_bd = ConnexionBD() lienRS = LienRS_BD(connexion_bd) print("test") res = lienRS.get_liensRS_membre_json(id) print(res) if res is None: return jsonify({"error": "Aucun artiste trouve"}) else: return res @app.route('/getMemberSocialLinks/<int:idMG>') def get_member_social_links(idMG): connexion_bd = ConnexionBD() lienRS = LienRS_BD(connexion_bd) res = lienRS.get_liensRS_membre_par_id_json(idMG) if res is None: return jsonify({"error": "Aucun lien de réseau social trouvé pour le membre"}) else: return res import { useEffect, useState, useRef, useLayoutEffect } from "react"; import SearchBar from "../../components/form/SearchBar"; import Combo from "../../components/form/Combo"; import CarteArtiste from "../../components/Artiste/CarteProgrammation"; import { motion } from "framer-motion"; import { useLocation } from "react-router-dom"; import axios from "axios"; import Footer from "../../components/footer"; import CarteProgrammation from "../../components/Artiste/CarteProgrammation"; import { AnimatePresence } from "framer-motion"; type Props = { isNavInFocus: boolean; setIsNavTransparent: (isNavTransparent: boolean) => void; }; type Groupe = { idG: number; nomG: string; descriptionG: string; datePassage: string; heurePassage: string; }; type Artiste = { descriptionA?: string; idMG?: number; idG: number; nomDeSceneMG: string; nomMG: string; prenomMG?: string; datePassage?: string; heurePassage?: string; }; type Evenement = { dateDebutE: string; dateFinE: string; heureDebutE: string; heureFinE: string; idE: number; idG: number; idL: number | null; nomE: string; }; type Programme = Groupe | Evenement | Artiste; export default function Programmation(props: Props) { const location = useLocation(); const idArtistComingFrom = location.state?.comesFromPageArtist; const oldX = 
location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [searchTerm, setSearchTerm] = useState(""); window.history.replaceState({}, document.title); const [lesGroupes, setLesGroupes] = useState<Groupe[]>(location.state ? oldGroupes : []); const [lesArtistes, setLesArtistes] = useState<Artiste[]>([]); const groupePassageMap = useRef< Map<number, { datePassage: string; heurePassage: string }> >(new Map()); useEffect(() => { axios.get("http://localhost:8080/getGroupesWithEvenements").then((res) => { const groupedData = res.data as Programme[][]; // console.log("groupedData") // console.log(groupedData); const listeGroupes: Groupe[] = []; const groupSet = new Set<number>(); const listeArtistes: Artiste[] = []; groupedData.forEach((groupArray) => { let groupeObj: Partial<Groupe> = {}; let artisteObj: Partial<Artiste> = {}; groupArray.forEach((item) => { if ("nomG" in item) { groupeObj = { ...groupeObj, ...item }; } else if ("nomDeSceneMG" in item) { artisteObj = { ...artisteObj, ...item }; } else if ("dateDebutE" in item) { const datePassage = item.dateDebutE; const heurePassage = item.heureDebutE; if (groupeObj.idG === item.idG) { groupeObj.datePassage = datePassage; groupeObj.heurePassage = heurePassage; } if (artisteObj.idG === item.idG) { artisteObj.datePassage = datePassage; artisteObj.heurePassage = heurePassage; } } }); // si l'id n'a pas encore était ajoutée à la liste on ajoute le groupe if (groupeObj.idG !== undefined && !groupSet.has(groupeObj.idG)) { listeGroupes.push(groupeObj as Groupe); groupSet.add(groupeObj.idG); } if (artisteObj.nomDeSceneMG !== undefined) { listeArtistes.push(artisteObj as Artiste); } }); setLesGroupes(listeGroupes); // console.log(listeGroupes); groupePassageMap.current.clear(); listeGroupes.forEach((groupe) => { groupePassageMap.current.set(groupe.idG, { datePassage: groupe.datePassage, heurePassage: groupe.heurePassage, }); }); setLesArtistes(listeArtistes); }); }, []); // let 
filteredGroupes = lesGroupes; // if (searchTerm) { // filteredGroupes = lesGroupes.filter((groupe) => // groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()) // ); // } // let filteredArtistes = lesArtistes; // if (searchTerm) { // filteredArtistes = lesArtistes.filter((artiste) => // artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()) // ) // } const [filtreDate, setFiltreDate] = useState("Tout"); console.log(filtreDate) let termeRechercher = 'Tout'; if (filtreDate === '21 Juillet') { termeRechercher = '07-21'; } else if (filtreDate === '22 Juillet') { termeRechercher = '07-22'; } else if (filtreDate === '23 Juillet') { termeRechercher = '07-23'; } let filteredGroupes = lesGroupes; if (searchTerm) { filteredGroupes = filteredGroupes.filter((groupe) => { console.log(groupe.nomG); if (searchTerm.length > 5) { return groupe.nomG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return groupe.nomG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== 'Tout') { console.log(termeRechercher) filteredGroupes = filteredGroupes.filter((groupe) => { return groupe.datePassage.includes(termeRechercher); }); } let filteredArtistes = lesArtistes; console.log("Les Artistes avant filtre:", lesArtistes); if (searchTerm) { console.log("Filtered Artistes après searchTerm:", filteredArtistes); filteredArtistes = filteredArtistes.filter((artiste) => { if (searchTerm.length > 5) { return artiste.nomDeSceneMG.toLowerCase().includes(searchTerm.toLowerCase()); } else { return artiste.nomDeSceneMG.toLowerCase().startsWith(searchTerm.toLowerCase()); } }); } if (filtreDate !== 'Tout') { filteredArtistes = filteredArtistes.filter((artiste) => { console.log("Filtered Artistes après searchTerm:", filteredArtistes); const groupeInfo = groupePassageMap.current.get(artiste.idG); return groupeInfo?.datePassage.includes(termeRechercher); }); } console.log(filteredGroupes); console.log(filteredArtistes); const [filtreAffichage, 
setFiltreAffichage] = useState("Grille"); const [filtreGenre, setFiltreGenre] = useState("Tout"); const pageRef = useRef<HTMLDivElement>(null); const contentVariants = { visible: { filter: "blur(0px)", scale: 1, zIndex: 1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, hidden: { filter: "blur(10px)", scale: 0.8, zIndex: -1, transition: { duration: 0.5, ease: [1, 0, 0, 1], }, }, }; useEffect(() => { window.scrollTo(0, 0); props.setIsNavTransparent(false); }, []); return ( <> <motion.div id="Programmation" className="page-defaut" variants={contentVariants} animate={props.isNavInFocus ? "hidden" : "visible"} ref={pageRef} > <header> <div className="title"> <h2>PROGRAMMATION</h2> <svg width="64" height="64" viewBox="0 0 64 64" fill="none" xmlns="http://www.w3.org/2000/svg" > <path d="M62.9991 27.739L42.1815 27.7675L56.8787 13.0286L50.7001 6.86056L36.0029 21.5994L35.9744 0.785744L27.2406 0.797718L27.2692 21.6114L12.5316 6.91288L6.36413 13.0979L21.1017 27.7964L0.289932 27.825L0.301899 36.5537L21.1137 36.5251L6.41646 51.2641L12.6009 57.4321L27.2981 42.6932L27.3266 63.5069L36.0603 63.4949L36.0318 42.6812L50.7694 57.3798L56.931 51.1948L42.1934 36.4962L63.011 36.4677L62.9991 27.739Z" fill="#FFD600" /> </svg> </div> <div className="filters-container"> <div className="filters"> <Combo title="DATE" choices={["Tout", "21 Juillet", "22 Juillet", "23 Juillet"]} currentChoice={filtreDate} setCurrentChoice={setFiltreDate} /> <Combo title="AFFICHAGE" choices={["Grille", "Horaires"]} currentChoice={filtreAffichage} setCurrentChoice={setFiltreAffichage} /> <Combo title="GENRE" choices={["Tout", "Rap", "Rock", "Pop"]} currentChoice={filtreGenre} setCurrentChoice={setFiltreGenre} /> </div> <SearchBar text="Rechercher un artiste" onSearch={setSearchTerm} /> </div> </header> <main className="liste-artistes"> <AnimatePresence> {filteredGroupes.map((groupe) => { console.log('idArtistComingFrom for groupe:', idArtistComingFrom); console.log("groupe.idG : "); console.log(groupe.idG); 
return ( <CarteProgrammation key={groupe.idG} id={groupe.idG} nomArtiste={groupe.nomG} description={groupe.descriptionG} date={groupe.datePassage} heure={groupe.heurePassage} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == groupe.idG ? oldX : null} oldY={idArtistComingFrom == groupe.idG ? oldY : null} comesFromPageArtist={idArtistComingFrom == groupe.idG} /> ); })} {filteredArtistes.map((artiste) => { console.log('idArtistComingFrom for artiste:', idArtistComingFrom); const groupeInfo = groupePassageMap.current.get(artiste.idG); console.log("artiste.idMG : "); console.log(artiste.idMG); return ( <CarteProgrammation key={artiste.idMG} id={artiste.idMG} nomArtiste={artiste.nomDeSceneMG} description={artiste.descriptionA} date={groupeInfo?.datePassage ?? "Date inconnue"} heure={groupeInfo?.heurePassage ?? "Heure inconnue"} setIsNavTransparent={props.setIsNavTransparent} oldGroupes={lesGroupes} oldX={idArtistComingFrom == artiste.idMG ? oldX : null} oldY={idArtistComingFrom == artiste.idMG ? 
oldY : null} comesFromPageArtist={idArtistComingFrom == artiste.idMG} /> ); })} </AnimatePresence> </main> </motion.div> <Footer /> </> ); } import { useEffect, useState, useRef } from "react"; import { Link, useLocation } from "react-router-dom"; import BoutonReseau from "../../components/Artiste/BoutonReseau"; import { motion } from "framer-motion"; import axios from "axios"; export default function PageEvenement() { const location = useLocation(); const oldX = location.state?.oldX; const oldY = location.state?.oldY; const oldGroupes = location.state?.oldGroupes; const [nomArtiste, setNomArtiste] = useState(location.state?.nomArtiste); const [date, setDate] = useState(location.state?.date); const [heure, setHeure] = useState(location.state?.heure); const titleRef = useRef<HTMLHeadingElement>(null); const [description, setDescription] = useState( location.state?.description || "Description par défaut si aucune description n'est passée." ); const [socialLinks, setSocialLinks] = useState<{ reseau: string }[]>([]); const params = new URLSearchParams(window.location.search); const idArtiste = params.get("id"); const [windowWidth, setWindowWidth] = useState(window.innerWidth); const [infosGridPosition, setInfosGridPosition] = useState<"top" | "bottom">( "top" ); function getSocialNetworkType( url: string ): | "soundcloud" | "spotify" | "instagram" | "twitter" | "youtube" | "inconnu" { if (url.includes("soundcloud")) { return "soundcloud"; } else if (url.includes("spotify")) { return "spotify"; } else if (url.includes("instagram")) { return "instagram"; } else if (url.includes("twitter")) { return "twitter"; } else if (url.includes("youtube")) { return "youtube"; } else { return "inconnu"; } } useEffect(() => { axios .get("http://localhost:8080/getSocialLinks/" + idArtiste) .then((response) => { setSocialLinks(response.data); console.log(response.data); }) .catch((error) => { console.error( "Erreur lors de la récupération des liens de réseaux sociaux", error ); }); 
const handleResize = () => { setWindowWidth(window.innerWidth); }; window.addEventListener("resize", handleResize); return () => { window.removeEventListener("resize", handleResize); }; }, []); useEffect(() => { console.log('oldXEvenement:', oldX); console.log('oldYEvenement:', oldY); if (titleRef.current) { // regarde si la width du titre est plus grande que 25% de la width de la fenetre - 2*3rem if (titleRef.current.offsetWidth > windowWidth / 4 - 2 * 3 * 16) { setInfosGridPosition("top"); } else { setInfosGridPosition("bottom"); } } }, [titleRef, windowWidth]); const infosVariants = { initial: { opacity: 0, transition: { duration: 0.1, ease: "easeInOut", }, }, visible: { opacity: 1, transition: { delay: 0.8, duration: 0.6, ease: "easeInOut", }, }, }; const overlayVariants = { initial: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 20%, rgba(0, 0, 0, 0) 60%)", transition: { duration: 0.1, ease: "easeInOut", }, }, extended: { background: "linear-gradient(to top, rgba(0, 0, 0, 0.7) 40%, rgba(0, 0, 0, 0) 100%)", transition: { delay: 0.6, duration: 0.6, ease: "easeInOut", }, }, }; return ( <div id="PageArtiste"> <motion.div className="overlay" variants={overlayVariants} initial="initial" animate={infosGridPosition === "top" ? "extended" : "initial"} /> <img src={"http://localhost:8080/getImageArtiste/" + idArtiste} alt="image de fond" /> <div className="content" style={{ columnGap: infosGridPosition === "top" ? "0" : "5rem", rowGap: windowWidth > 991 ? 
"0" : "5rem", }} > <h3 ref={titleRef}> {nomArtiste .toUpperCase() .split(" ") .map((mot: string, index: number) => { return ( <span key={index}> {mot} <br /> </span> ); })} </h3> <motion.div className="infos" variants={infosVariants} initial="initial" animate="visible" exit="initial" style={{ gridArea: infosGridPosition }} > <p className="description">{description}</p> <div className="les-reseaux"> {socialLinks.map((link, index) => { const type = getSocialNetworkType(link.reseau); if (type) { return ( <BoutonReseau key={index} href={link.reseau} type={type} /> ); } return null; })} </div> <Link to="/programmation" state={{ comesFromPageArtist: idArtiste, oldX: oldX, oldY: oldY, oldGroupes: oldGroupes, }} className="btn-retour" > <svg width="36" height="28" viewBox="0 0 36 28" fill="none" xmlns="http://www.w3.org/2000/svg" > <rect x="6.52539" y="0.321533" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(45 6.52539 0.321533)" /> <rect x="3.87891" y="25.5957" width="35.8974" height="3.58974" rx="1.79487" transform="rotate(-45 3.87891 25.5957)" /> </svg> </Link> </motion.div> <div className="date-heure"> <h4>{date}</h4> <h4>{heure}</h4> </div> </div> </div> ); }
07e98a3fe2764d1f4bd3aece117cb714
{ "intermediate": 0.38824281096458435, "beginner": 0.42833611369132996, "expert": 0.18342110514640808 }
37,715
in this javascript for leaflet.js remove the first marker added by the user if they add another marker before pressing the submit button - map.on("click", function (e) { // Add user marker to the array var userMarker = L.marker(e.latlng).addTo(map); userMarkers.push(userMarker.getLatLng()); //add submitbutton document.getElementById("buttonsdiv").appendChild(submitButton); submitButton.onclick = function () { const marker = L.marker(e.latlng).addTo(map); const distance = L.latLng(e.latlng).distanceTo(referencePoint); // Calculate distanceInMiles before using it map.off("click"); // Create a bounds object encompassing both markers const bounds = L.latLngBounds([e.latlng, referencePoint]); // Zoom the map to fit those bounds map.fitBounds(bounds); //remove submit button and add next painting button document.getElementById("buttonsdiv").appendChild(nextButton); document.getElementById("buttonsdiv").removeChild(submitButton); const distanceInMiles = distance * 0.000621371; // Update total distance with clicked marker's distance totalDistance += distanceInMiles; roundDistances.push(distanceInMiles); // Add distance to the roundDistances array // Calculate the adjusted width, ensuring it's not negative const adjustedWidth = Math.max(0, linePosition - totalDistance); // Conditionally set the width to 0 if totalDistance is negative const mileLineWidth = totalDistance < 0 ? 
0 : adjustedWidth; //adjust mile-line const mileLine = document.getElementById("mile-line"); const labels = document.querySelectorAll(".label"); const labelSpacing = 100; // Adjust based on your desired spacing labels.forEach((label, index) => { const offset = index * labelSpacing; label.style.left = `${offset}px`; }); labels.forEach((label, index) => { const offset = index * labelSpacing; label.style.left = `${offset}px`; }); mileLine.style.width = `${adjustedWidth}px`; mileLine.style.height = "12px"; mileLine.style.backgroundColor = "blue"; mileLine.style.position = "absolute"; mileLine.style.left = "0"; // Change button to "Results" if miles reaches 0 if (adjustedWidth === 0 || adjustedWidth < 0) { document.getElementById("buttonsdiv").removeChild(nextButton); document.getElementById("buttonsdiv").appendChild(resultsButton); } // display gamescore on button click resultsButton.onclick = function () { document.addEventListener("click", function (event) { const resultsinfoDiv = document.getElementById("resultsinfo"); // Check if the click was outside the resultsinfo div if (resultsinfoDiv.contains(event.target)) { // Close the resultsinfo div resultsinfoDiv.parentElement.removeChild(resultsinfoDiv); } });
09678f0dbc8cda7c33e24670c66b928e
{ "intermediate": 0.3263334333896637, "beginner": 0.396429181098938, "expert": 0.2772373855113983 }
37,716
how to extend or use an api in a .d.ts file in typescript
963a4b77b3d37c28b2984f373cb48a9b
{ "intermediate": 0.7592140436172485, "beginner": 0.16602538526058197, "expert": 0.07476058602333069 }
37,717
obsidian events.js I want to be able to understand how to use this class, how I can get events from this api, tl;dr I just want to understand how to use APIs. apparently can also extend the api using super? I dont know how to word my question other than I see that these methods exist but I dont know how to use, how to debug to see what they do to the program etc
60b42dfa45c967a44b13853397cc3d30
{ "intermediate": 0.9134055972099304, "beginner": 0.06632154434919357, "expert": 0.02027289569377899 }
37,718
Hello
18f0407148409230e68f38679aae313d
{ "intermediate": 0.3123404085636139, "beginner": 0.2729349136352539, "expert": 0.4147246778011322 }
37,719
Give the characteristics of the warring countries during Six-day war in 1967: Denotes the first and second sides of the conflict. - [CFT] Duration of the conflict in months (number of months) (0 if the conflict lasted less than 15 days) - [AS1] Army size of first country - [AS2] Army size of second country - [GL1] Geographical location of first country (check one) (number 0-7) 0. Greenland 1. North America 2. South America 3. Europe 4. Asia 5. Middle East 6. Africa 7. Australia - [GL2] Geographical location of second country (also one note) - [List of RL1{n}, where n - number 0-11] Religion of first country. If there are several religions in the country, show their ratio as a percentage (as float number from 0 to 1). 0. [RL10] Catholicism 1. [RL11] Protestantism 2. [RL12] Orthodoxy 3. [RL13] Sunni Islam 4. [RL14] Shia Islam 5. [RL15] Hinduism 6. [RL16] Buddhism 7. [RL17] Judaism 8. [RL18] Religions of Africa 9. [RL19] Taoism 10. [RL110] Confucianism 11. [RL111] Unknown or atheism - [List of RL2{n}] Religion of second country (same as above) - [List of PH1{n}, where n - number 0-6] Political characteristics of first country. (check those that apply) (0 - no, 1 - yes) 0. [PH10] Monarchy 1. [PH11] Republic 2. [PH12] Unitary state 3. [PH13] Federation 4. [PH14] Totalitarianism 5. [PH15] Authoritarianism 6. [PH16] Democracy - [List of PH2{n}] Political features of second country (same as above) - [ED1] Education level of the population of first country. Percent float value from 0 to 1 - [ED2] Education level of the population of second country. Percent float value from 0 to 1 - [WF1] Wealth of fossil resources of first country. Percent float value from 0 to 1 - [WF2] Fossil resource wealth of second country Percent float value from 0 to 1 - [MR1] Percentage of first country's maritime border. Float value from 0 to 1 (0% - landlocked, 100% - island country) - [MR2] Percentage of maritime border of second country. - [AT1] Average annual air temperature first country. 
Float value from 0 to 1, where 0= -30 celsius degrees, 1=50 celsius degrees - [AT2] Average annual air temperature of second country. Outcome of the conflict (check one): - [V1] = Victory of the first side (0 - no, 1 - yes) - [V2] = Victory of the second side (0 - no, 1 - yes) - [DR] = Draw (0 - no, 1 - yes) - [CSF] = Ceasefire (0 - no, 1 - yes) - [QUO] = Status quo (0 - no, 1 - yes) All data about the country helps in times of armed conflict. If some data is not exactly disclosed, try to find the maximum possible and plausible, for example, analyze neighboring countries and the situation in the world at the time of the military conflict. Try to keep the data as consistent as possible with historical information. The result must be presented as a sequence of numeric values separated by a space. Format: [CFT] [AS1] [AS2] [GL1] [GL2] [RL10] [RL11] [RL12] [RL13] [RL14] [RL15] [RL16] [RL17] [RL18] [RL19] [RL110] [RL111] [RL20] [RL21] [RL22] [RL23] [RL24] [RL25] [RL26] [RL27] [RL28] [RL29] [RL210] [RL211] [PH10] [PH11] [PH12] [PH13] [PH14] [PH15] [PH16] [PH20] [PH21] [PH22] [PH23] [PH24] [PH25] [PH26] [ED1] [ED2] [WF1] [WF2] [MR1] [MR2] [AT1] [AT2] [V1] [V2] [DR] [CSF] [QUO]
df31c2b92433720bf7e041824220c71e
{ "intermediate": 0.45171868801116943, "beginner": 0.32661160826683044, "expert": 0.2216697335243225 }
37,720
I want to use vite with a vanilla javascript project. I have a tailwind.config.js setup and a file called "base.css". How do I setup vite so that it reloads the page when I change the styles on an element?
d4e39d97236deb3166fe8f42a610a13f
{ "intermediate": 0.5426129698753357, "beginner": 0.26061415672302246, "expert": 0.19677288830280304 }
37,721
Hello
f7c1027e71488ee15f92c90d96057315
{ "intermediate": 0.3123404085636139, "beginner": 0.2729349136352539, "expert": 0.4147246778011322 }
37,722
import torch import torch.nn as nn import torch.nn.functional as F from torchtext.data.utils import get_tokenizer from torchtext.vocab import build_vocab_from_iterator import numpy as np import matplotlib.pyplot as plt from tqdm import tqdm # Define the Expert module class Expert(nn.Module): def __init__(self, input_dim, hidden_dim): super(Expert, self).__init__() self.fc1 = nn.Linear(input_dim, hidden_dim) self.activation = nn.ReLU() self.fc2 = nn.Linear(hidden_dim, input_dim) def forward(self, x): x = self.activation(self.fc1(x)) x = self.fc2(x) return x # Define the MoELayer module class MoELayer(nn.Module): def __init__(self, num_experts, input_dim, hidden_dim): super(MoELayer, self).__init__() self.experts = nn.ModuleList([Expert(input_dim, hidden_dim) for _ in range(num_experts)]) self.gate = nn.Linear(input_dim, num_experts) def forward(self, x): gating_distribution = F.softmax(self.gate(x), dim=1) expert_outputs = torch.stack([expert(x) for expert in self.experts], dim=1) output = torch.einsum('bi,bij->bj', gating_distribution, expert_outputs) # This efficiently computes the weighted sum of expert outputs return output # Define the SelectionMechanism module class SelectionMechanism(nn.Module): def __init__(self, input_dim, state_dim): super(SelectionMechanism, self).__init__() self.selection_layer = nn.Linear(input_dim + state_dim, state_dim) def forward(self, state, u): # Ensure both state and u have a batch dimension (state already has it) combined = torch.cat((state, u), dim=1) new_state = self.selection_layer(combined) return new_state # Define the StateSpaceMamba module class StateSpaceMamba(nn.Module): def __init__(self, input_dim, state_dim, num_experts, hidden_dim): super(StateSpaceMamba, self).__init__() self.state_dim = state_dim self.state = nn.Parameter(torch.zeros(1, state_dim)) self.input_layer = nn.Linear(input_dim, state_dim) self.selection_mechanism = SelectionMechanism(input_dim, state_dim) self.moe_layer = MoELayer(num_experts, state_dim, 
hidden_dim) def forward(self, input_sequence): batch_size = input_sequence.size(0) # Assuming the input is [batch_size, seq_length, input_dim] outputs = [] state = torch.zeros(batch_size, self.state_dim).to(input_sequence.device) # Initialize state for the current batch # Debugging: print shapes before loop print("input_sequence shape at forward entry:", input_sequence.shape) print("Initial state shape at forward entry:", state.shape) for u in input_sequence.split(1, dim=1): # Corrected: split along dimension 1 (time/sequence) u = u.squeeze(1) # Corrected: Remove the sequence dimension which is now dim 1 due to split # Debugging: Print shapes inside loop print("Input shape (u) at loop:", u.shape) state = self.selection_mechanism(state, u) print("State shape after selection mechanism:", state.shape) u_projected = self.input_layer(u) print("Input projection (u_projected) shape:", u_projected.shape) output = self.moe_layer(u_projected + state) print("Output shape after MoE layer:", output.shape) outputs.append(output) # Debugging: print shape of individual outputs and final stacked output print("Individual outputs shapes:", [o.shape for o in outputs]) output_tensor = torch.stack(outputs, dim=1) # Corrected: stack along dimension 1 (time/sequence) print("Stacked output shape:", output_tensor.shape) return output_tensor # Load the dataset try: print("Loading dataset...") with open("C:/Users/Dell-PC/Desktop/The-Physics-Dataset.txt", "r") as f: text = f.read() except FileNotFoundError: print("Error: Dataset file not found.") exit(1) # Tokenize the text try: print("Tokenizing text...") tokenizer = get_tokenizer("basic_english") tokens = tokenizer(text.lower()) except Exception as e: print("Error occurred during tokenization:", str(e)) exit(1) # Build the vocabulary try: print("Building vocabulary...") vocab = build_vocab_from_iterator([tokens], specials=["<unk>", "<pad>", "<sos>", "<eos>"]) vocab.set_default_index(vocab["<unk>"]) numericalized_tokens = [vocab["<sos>"]] + 
[vocab[token] for token in tokens] + [vocab["<eos>"]] except Exception as e: print("Error occurred during vocabulary building:", str(e)) exit(1) # Prepare the input sequence batch_size = 24 seq_length = 12 # This is the “token” size we observed in the debug outputs vocab_size = len(vocab) num_batches = (len(numericalized_tokens) - 1) // (batch_size * seq_length) if len(numericalized_tokens) < (batch_size * seq_length): print(f"Error: Dataset is too small for the chosen batch size and sequence length. Tokens available: {len(numericalized_tokens)}") exit(1) # Calculate the number of batches and ensure it’s not zero if num_batches == 0: print(f"Error: Number of batches is zero. Check if your dataset is large enough or consider reducing batch size/sequence length.") exit(1) numericalized_tokens = numericalized_tokens[:num_batches * batch_size * seq_length] input_sequence = torch.tensor(numericalized_tokens).view(-1, batch_size, seq_length) input_sequence = input_sequence.float() # Ensure input_sequence is of type float print("input_sequence shape:", input_sequence.shape) print("input_sequence dimension:", input_sequence.dim()) # Define the model using actual input_dim which corresponds to seq_length num_experts = 5 state_dim = 256 hidden_dim = 256 input_dim = seq_length # Corrected to use seq_length instead of vocab_size model = StateSpaceMamba(input_dim, state_dim, num_experts, hidden_dim) learning_rate = 0.001 print("model state shape:", model.state.shape) print("model state dimension:", model.state.dim()) # Training loop try: print("Training model…") def train(model, data, num_batches, batch_size, seq_length, output_features, epochs): model.train() loss_fn = nn.CrossEntropyLoss() optimizer = torch.optim.Adam(model.parameters(), lr=learning_rate) loss_history = [] for epoch in tqdm(range(epochs)): total_loss = 0.0 # Debugging: print data tensor shape print("Data tensor shape:", data.shape) for i in range(0, data.size(1) - seq_length, seq_length): # Debugging: print 
current index and input-target shapes print(f"Current index: {i}") inputs = data[:, i:i+seq_length] print("Inputs shape:", inputs.shape) targets = data[:, i+1:i+1+seq_length] targets = targets[:, 1:] # remove the first element of each sequence targets = targets.reshape(-1) # flatten the tensor targets = targets.long() # convert to long tensor optimizer.zero_grad() outputs = model(inputs) # No need for transpose since the sequence is already the second dimension out of model. # We reshape outputs to match the total number of predictions for all sequences in all batches. outputs = outputs.reshape(-1, output_features) # Flatten the ouput predictions loss = loss_fn(outputs, targets) loss.backward() optimizer.step() # Validate that vocab_size actually matches the last dimension of outputs assert outputs.size(-1) == vocab_size, "Mismatch between output size and vocab size." total_loss += loss.item() epoch_loss = total_loss / num_batches loss_history.append(epoch_loss) print(f"Epoch {epoch}, Loss: {epoch_loss}") return loss_history # Same number of features as the last layer of the model, typically the logits layer. 
output_features = 256 epochs = 10 # Correct the train function call by replacing vocab_size with output_features loss_history = train(model, input_sequence, num_batches, batch_size, seq_length, output_features, epochs) # Plot loss history plt.plot(loss_history) plt.xlabel("Epochs") plt.ylabel("Loss") plt.title("Training Loss History") plt.show() except Exception as e: print("Error occurred during training:", str(e)) exit(1) # Generate function try: print("Generating text...") def generate(model, start_seq, length, temperature=0.8): model.eval() state = model.state numericalized_start = torch.tensor([vocab[token] for token in tokenizer(start_seq.lower())], dtype=torch.long).unsqueeze(0) input_seq = numericalized_start generated_sequence = [] for i in range(length): output = model(input_seq) # Sample from the network as a multinomial distribution output_prob = F.softmax(output[:, -1] / temperature, dim=-1).detach() predicted_index = torch.multinomial(output_prob, 1).item() generated_sequence.append(predicted_index) input_seq = torch.cat((input_seq, torch.tensor([[predicted_index]])), dim=1) return " ".join(vocab.lookup_tokens(generated_sequence)) # Generating a sample start_seq = "The fundamental concepts of physics" # Start of sequence to begin text generation generated_text = generate(model, start_seq, length=100) # Length of generated sequence print(generated_text) except Exception as e: print("Error occurred during text generation:", str(e)) exit(1)
8d451001b6950aacd9da4374702526b8
{ "intermediate": 0.30583521723747253, "beginner": 0.40679270029067993, "expert": 0.28737205266952515 }
37,723
html table each row in each table have english word at the begining of line and corresponding arabic word at the end of the line
73f92f63da5d21949e0ce09eae171e2a
{ "intermediate": 0.38687795400619507, "beginner": 0.2576725482940674, "expert": 0.35544952750205994 }
37,724
hi
fac0c578dfd42dbf47455196e82957e0
{ "intermediate": 0.3246487081050873, "beginner": 0.27135494351387024, "expert": 0.40399640798568726 }
37,725
Я строю предметную область Кинопоиск. Напиши на C++ реализацию класса Product? Вот атрибуты класса Product: - Name: string - Year: int - Genre: string - Director: list of Person - Screenplay: list of Person - Producer: list of Person - Operator: list of Person - Compositor: list of Person - Designer: list of Person - Editing: list of Person - Country: list of string - Budget: unsigned long - USA_box_office: unsigned long - World_box_office: unsigned long - Rus_box_office: unsigned long - Rus_premiere: Date - World_premiere: Date - Digital_release: Date - DVD_release: Date - Rus_age_rating: Rus_age - MPAA_rating: MPAA - Timing_in_mins: int - Starring: List of Person - Dubbing: List of Person - Rating: double - Users_reviews: list of Review и его методы: + getName(): string + setName(name: string): void + getYear(): int + setYear(year: int): void + getDirector(): list of Person + setDirector(director: list of Person): void + getScreenplay(): list of Person + setScreenplay(screenplay: list of Person): void + getProducer(): list of Person + setProducer(producer: list of Person): void + getOperator(): list of Person + setOperator(operator: list of Person): void + getCompositor(): list of Person + setCompositor(compositor: list of Person): void + getDesigner(): list of Person + setDesigner(designer: list of Person): void + getCountry(): list of string + setCountry(country: list of string): void + getBudget(): unsigned long + setBudget(budget: unsigned long): void + getUSABoxOffice(): unsigned long + setUSABoxOffice(boxOffice: unsigned long): void + getWorldBoxOffice(): unsigned long + setWorldBoxOffice(boxOffice: unsigned long): void + getRusBoxOffice(): unsigned long + setRusBoxOffice(boxOffice: unsigned long): void + getRusPremiere(): Date + setRusPremiere(premiere: Date): void + getWorldPremiere(): Date + setWorldPremiere(premiere: Date): void + getDigitalRelease(): Date + setDigitalRelease(release: Date): void + getDVDRelease(): Date + setDVDRelease(release: Date): 
void + getRusAgeRating(): Rus_age + setRusAgeRating(ageRating: Rus_age): void + getMPAARating(): MPAA + setMPAARating(rating: MPAA): void + getTimingInMins(): int + setTimingInMins(timing: int): void + getStarring(): list of Person + setStarring(starring: list of Person): void + getDubbing(): list of Person + setDubbing(dubbing: list of Person): void + getRating(): double + setRating(rating: double): void + getUsersReviews(): list of Review + setUsersReviews(reviews: list of Review): void
3959198e35deba16a270252a47a0b46b
{ "intermediate": 0.29876261949539185, "beginner": 0.36837059259414673, "expert": 0.33286672830581665 }
37,726
i want to close specific modal angular
8585f01d4e4fce146e8aa318e94f8b4e
{ "intermediate": 0.40593960881233215, "beginner": 0.3668341040611267, "expert": 0.22722627222537994 }
37,727
what is the difference between JedisNoReachableClusterNodeException and JedisConnectionException
dda3fa49313cd270aa0890a40d9a7240
{ "intermediate": 0.4853222072124481, "beginner": 0.1797986477613449, "expert": 0.3348791003227234 }
37,728
This is my data: Position Gene Annotation codon_pos ref alt 0 500 1000 1500 ... 55500 56000 56500 57000 57500 58000 58500 59000 59500 60000 0 201 thrL missense 2 T G 0.0 0.0 0.0 0.0 ... 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 0.0 1.0 1 1173 thrA missense 0 T G 0.0 0.0 0.0 0.0 ... 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 1.0 2 1624 thrA missense 1 T C 0.0 1.0 0.0 1.0 ... 0.0 0.0 0.0 0.0 0.0 1.0 0.0 0.0 0.0 I want to plot a line plot, where the X axis are times (columns from 0 to 60000) and Y axis is the amount of mutations. I want to plot this grouped by codon_pos. This means that the plot should have only three lines (each one representing a codon position).
292db518ce2dce7bf94e2cda6bf8cfaf
{ "intermediate": 0.38398072123527527, "beginner": 0.2782326936721802, "expert": 0.33778655529022217 }
37,729
Write a C# script : In unity, i have a animation rig called wounded_pose that i want to set its weight to wounded_pose.weight = 1f when my character is wounded, using the bool isInjured = true;
df15c39811804db1fd27627272038c18
{ "intermediate": 0.38909614086151123, "beginner": 0.2244597226381302, "expert": 0.3864441514015198 }
37,730
import gym import tensorflow as tf from tensorflow.keras import layers import numpy as np import matplotlib.pyplot as plt problem = "Pendulum-v1" env = gym.make(problem) num_states = env.observation_space.shape[0] print("Size of State Space -> {}".format(num_states)) num_actions = env.action_space.shape[0] print("Size of Action Space -> {}".format(num_actions)) upper_bound = env.action_space.high[0] lower_bound = env.action_space.low[0] print("Max Value of Action -> {}".format(upper_bound)) print("Min Value of Action -> {}".format(lower_bound)) class OUActionNoise: def __init__(self, mean, std_deviation, theta=0.15, dt=1e-2, x_initial=None): self.theta = theta self.mean = mean self.std_dev = std_deviation self.dt = dt self.x_initial = x_initial self.reset() def __call__(self): # Formula taken from https://www.wikipedia.org/wiki/Ornstein-Uhlenbeck_process. x = ( self.x_prev + self.theta * (self.mean - self.x_prev) * self.dt + self.std_dev * np.sqrt(self.dt) * np.random.normal(size=self.mean.shape) ) # Store x into x_prev # Makes next noise dependent on current one self.x_prev = x return x def reset(self): if self.x_initial is not None: self.x_prev = self.x_initial else: self.x_prev = np.zeros_like(self.mean) class Buffer: def __init__(self, buffer_capacity=100000, batch_size=64): # Number of "experiences" to store at max self.buffer_capacity = buffer_capacity # Num of tuples to train on. self.batch_size = batch_size # Its tells us num of times record() was called. 
self.buffer_counter = 0 # Instead of list of tuples as the exp.replay concept go # We use different np.arrays for each tuple element self.state_buffer = np.zeros((self.buffer_capacity, num_states)) self.action_buffer = np.zeros((self.buffer_capacity, num_actions)) self.reward_buffer = np.zeros((self.buffer_capacity, 1)) self.next_state_buffer = np.zeros((self.buffer_capacity, num_states)) # Takes (s,a,r,s') obervation tuple as input def record(self, obs_tuple): # Set index to zero if buffer_capacity is exceeded, # replacing old records index = self.buffer_counter % self.buffer_capacity self.state_buffer[index] = obs_tuple[0] self.action_buffer[index] = obs_tuple[1] self.reward_buffer[index] = obs_tuple[2] self.next_state_buffer[index] = obs_tuple[3] self.buffer_counter += 1 # Eager execution is turned on by default in TensorFlow 2. Decorating with tf.function allows # TensorFlow to build a static graph out of the logic and computations in our function. # This provides a large speed up for blocks of code that contain many small TensorFlow operations such as this one. @tf.function def update( self, state_batch, action_batch, reward_batch, next_state_batch, ): # Training and updating Actor & Critic networks. # See Pseudo Code. 
with tf.GradientTape() as tape: target_actions = target_actor(next_state_batch, training=True) y = reward_batch + gamma * target_critic( [next_state_batch, target_actions], training=True ) critic_value = critic_model([state_batch, action_batch], training=True) critic_loss = tf.math.reduce_mean(tf.math.square(y - critic_value)) critic_grad = tape.gradient(critic_loss, critic_model.trainable_variables) critic_optimizer.apply_gradients( zip(critic_grad, critic_model.trainable_variables) ) with tf.GradientTape() as tape: actions = actor_model(state_batch, training=True) critic_value = critic_model([state_batch, actions], training=True) # Used `-value` as we want to maximize the value given # by the critic for our actions actor_loss = -tf.math.reduce_mean(critic_value) actor_grad = tape.gradient(actor_loss, actor_model.trainable_variables) actor_optimizer.apply_gradients( zip(actor_grad, actor_model.trainable_variables) ) # We compute the loss and update parameters def learn(self): # Get sampling range record_range = min(self.buffer_counter, self.buffer_capacity) # Randomly sample indices batch_indices = np.random.choice(record_range, self.batch_size) # Convert to tensors state_batch = tf.convert_to_tensor(self.state_buffer[batch_indices]) action_batch = tf.convert_to_tensor(self.action_buffer[batch_indices]) reward_batch = tf.convert_to_tensor(self.reward_buffer[batch_indices]) reward_batch = tf.cast(reward_batch, dtype=tf.float32) next_state_batch = tf.convert_to_tensor(self.next_state_buffer[batch_indices]) self.update(state_batch, action_batch, reward_batch, next_state_batch) # This update target parameters slowly # Based on rate `tau`, which is much less than one. 
@tf.function def update_target(target_weights, weights, tau): for (a, b) in zip(target_weights, weights): a.assign(b * tau + a * (1 - tau)) def get_actor(): # Initialize weights between -3e-3 and 3-e3 last_init = tf.random_uniform_initializer(minval=-0.003, maxval=0.003) inputs = layers.Input(shape=(num_states,)) out = layers.Dense(256, activation="relu")(inputs) out = layers.Dense(256, activation="relu")(out) outputs = layers.Dense(1, activation="tanh", kernel_initializer=last_init)(out) # Our upper bound is 2.0 for Pendulum. outputs = outputs * upper_bound model = tf.keras.Model(inputs, outputs) return model def get_critic(): # State as input state_input = layers.Input(shape=(num_states)) state_out = layers.Dense(16, activation="relu")(state_input) state_out = layers.Dense(32, activation="relu")(state_out) # Action as input action_input = layers.Input(shape=(num_actions)) action_out = layers.Dense(32, activation="relu")(action_input) # Both are passed through seperate layer before concatenating concat = layers.Concatenate()([state_out, action_out]) out = layers.Dense(256, activation="relu")(concat) out = layers.Dense(256, activation="relu")(out) outputs = layers.Dense(1)(out) # Outputs single value for give state-action model = tf.keras.Model([state_input, action_input], outputs) return model def policy(state, noise_object): sampled_actions = tf.squeeze(actor_model(state)) noise = noise_object() # Adding noise to action sampled_actions = sampled_actions.numpy() + noise # We make sure action is within bounds legal_action = np.clip(sampled_actions, lower_bound, upper_bound) return [np.squeeze(legal_action)] std_dev = 0.2 ou_noise = OUActionNoise(mean=np.zeros(1), std_deviation=float(std_dev) * np.ones(1)) actor_model = get_actor() critic_model = get_critic() target_actor = get_actor() target_critic = get_critic() # Making the weights equal initially target_actor.set_weights(actor_model.get_weights()) target_critic.set_weights(critic_model.get_weights()) # Learning 
rate for actor-critic models critic_lr = 0.002 actor_lr = 0.001 critic_optimizer = tf.keras.optimizers.Adam(critic_lr) actor_optimizer = tf.keras.optimizers.Adam(actor_lr) total_episodes = 100 # Discount factor for future rewards gamma = 0.99 # Used to update target networks tau = 0.005 buffer = Buffer(50000, 64) # To store reward history of each episode ep_reward_list = [] # To store average reward history of last few episodes avg_reward_list = [] # Takes about 4 min to train for ep in range(total_episodes): prev_state = env.reset() episodic_reward = 0 while True: # Uncomment this to see the Actor in action # But not in a python notebook. # env.render() tf_prev_state = tf.expand_dims(tf.convert_to_tensor(prev_state), 0) action = policy(tf_prev_state, ou_noise) # Recieve state and reward from environment. state, reward, done, info = env.step(action) buffer.record((prev_state, action, reward, state)) episodic_reward += reward buffer.learn() update_target(target_actor.variables, actor_model.variables, tau) update_target(target_critic.variables, critic_model.variables, tau) # End this episode when `done` is True if done: break prev_state = state ep_reward_list.append(episodic_reward) # Mean of last 40 episodes avg_reward = np.mean(ep_reward_list[-40:]) print("Episode * {} * Avg Reward is ==> {}".format(ep, avg_reward)) avg_reward_list.append(avg_reward) # Plotting graph # Episodes versus Avg. Rewards plt.plot(avg_reward_list) plt.xlabel("Episode") plt.ylabel("Avg. Epsiodic Reward") plt.show() # Save the weights actor_model.save_weights("pendulum_actor.h5") critic_model.save_weights("pendulum_critic.h5") target_actor.save_weights("pendulum_target_actor.h5") target_critic.save_weights("pendulum_target_critic.h5") How to test above code
d4a2157143ea483a7fb858ef1911412b
{ "intermediate": 0.3031712472438812, "beginner": 0.5043735504150391, "expert": 0.19245514273643494 }
37,731
import gym import tensorflow as tf from tensorflow.keras import layers import numpy as np import matplotlib.pyplot as plt problem = “Pendulum-v1” env = gym.make(problem) num_states = env.observation_space.shape[0] print(“Size of State Space -> {}”.format(num_states)) num_actions = env.action_space.shape[0] print(“Size of Action Space -> {}”.format(num_actions)) upper_bound = env.action_space.high[0] lower_bound = env.action_space.low[0] print(“Max Value of Action -> {}”.format(upper_bound)) print(“Min Value of Action -> {}”.format(lower_bound)) class OUActionNoise: def init(self, mean, std_deviation, theta=0.15, dt=1e-2, x_initial=None): self.theta = theta self.mean = mean self.std_dev = std_deviation self.dt = dt self.x_initial = x_initial self.reset() def call(self): # Formula taken from https://www.wikipedia.org/wiki/Ornstein-Uhlenbeck_process. x = ( self.x_prev + self.theta * (self.mean - self.x_prev) * self.dt + self.std_dev * np.sqrt(self.dt) * np.random.normal(size=self.mean.shape) ) # Store x into x_prev # Makes next noise dependent on current one self.x_prev = x return x def reset(self): if self.x_initial is not None: self.x_prev = self.x_initial else: self.x_prev = np.zeros_like(self.mean) class Buffer: def init(self, buffer_capacity=100000, batch_size=64): # Number of “experiences” to store at max self.buffer_capacity = buffer_capacity # Num of tuples to train on. self.batch_size = batch_size # Its tells us num of times record() was called. 
self.buffer_counter = 0 # Instead of list of tuples as the exp.replay concept go # We use different np.arrays for each tuple element self.state_buffer = np.zeros((self.buffer_capacity, num_states)) self.action_buffer = np.zeros((self.buffer_capacity, num_actions)) self.reward_buffer = np.zeros((self.buffer_capacity, 1)) self.next_state_buffer = np.zeros((self.buffer_capacity, num_states)) # Takes (s,a,r,s’) obervation tuple as input def record(self, obs_tuple): # Set index to zero if buffer_capacity is exceeded, # replacing old records index = self.buffer_counter % self.buffer_capacity self.state_buffer[index] = obs_tuple[0] self.action_buffer[index] = obs_tuple[1] self.reward_buffer[index] = obs_tuple[2] self.next_state_buffer[index] = obs_tuple[3] self.buffer_counter += 1 # Eager execution is turned on by default in TensorFlow 2. Decorating with tf.function allows # TensorFlow to build a static graph out of the logic and computations in our function. # This provides a large speed up for blocks of code that contain many small TensorFlow operations such as this one. @tf.function def update( self, state_batch, action_batch, reward_batch, next_state_batch, ): # Training and updating Actor & Critic networks. # See Pseudo Code. 
with tf.GradientTape() as tape: target_actions = target_actor(next_state_batch, training=True) y = reward_batch + gamma * target_critic( [next_state_batch, target_actions], training=True ) critic_value = critic_model([state_batch, action_batch], training=True) critic_loss = tf.math.reduce_mean(tf.math.square(y - critic_value)) critic_grad = tape.gradient(critic_loss, critic_model.trainable_variables) critic_optimizer.apply_gradients( zip(critic_grad, critic_model.trainable_variables) ) with tf.GradientTape() as tape: actions = actor_model(state_batch, training=True) critic_value = critic_model([state_batch, actions], training=True) # Used -value as we want to maximize the value given # by the critic for our actions actor_loss = -tf.math.reduce_mean(critic_value) actor_grad = tape.gradient(actor_loss, actor_model.trainable_variables) actor_optimizer.apply_gradients( zip(actor_grad, actor_model.trainable_variables) ) # We compute the loss and update parameters def learn(self): # Get sampling range record_range = min(self.buffer_counter, self.buffer_capacity) # Randomly sample indices batch_indices = np.random.choice(record_range, self.batch_size) # Convert to tensors state_batch = tf.convert_to_tensor(self.state_buffer[batch_indices]) action_batch = tf.convert_to_tensor(self.action_buffer[batch_indices]) reward_batch = tf.convert_to_tensor(self.reward_buffer[batch_indices]) reward_batch = tf.cast(reward_batch, dtype=tf.float32) next_state_batch = tf.convert_to_tensor(self.next_state_buffer[batch_indices]) self.update(state_batch, action_batch, reward_batch, next_state_batch) # This update target parameters slowly # Based on rate tau, which is much less than one. 
@tf.function def update_target(target_weights, weights, tau): for (a, b) in zip(target_weights, weights): a.assign(b * tau + a * (1 - tau)) def get_actor(): # Initialize weights between -3e-3 and 3-e3 last_init = tf.random_uniform_initializer(minval=-0.003, maxval=0.003) inputs = layers.Input(shape=(num_states,)) out = layers.Dense(256, activation=“relu”)(inputs) out = layers.Dense(256, activation=“relu”)(out) outputs = layers.Dense(1, activation=“tanh”, kernel_initializer=last_init)(out) # Our upper bound is 2.0 for Pendulum. outputs = outputs * upper_bound model = tf.keras.Model(inputs, outputs) return model def get_critic(): # State as input state_input = layers.Input(shape=(num_states)) state_out = layers.Dense(16, activation=“relu”)(state_input) state_out = layers.Dense(32, activation=“relu”)(state_out) # Action as input action_input = layers.Input(shape=(num_actions)) action_out = layers.Dense(32, activation=“relu”)(action_input) # Both are passed through seperate layer before concatenating concat = layers.Concatenate()([state_out, action_out]) out = layers.Dense(256, activation=“relu”)(concat) out = layers.Dense(256, activation=“relu”)(out) outputs = layers.Dense(1)(out) # Outputs single value for give state-action model = tf.keras.Model([state_input, action_input], outputs) return model def policy(state, noise_object): sampled_actions = tf.squeeze(actor_model(state)) noise = noise_object() # Adding noise to action sampled_actions = sampled_actions.numpy() + noise # We make sure action is within bounds legal_action = np.clip(sampled_actions, lower_bound, upper_bound) return [np.squeeze(legal_action)] std_dev = 0.2 ou_noise = OUActionNoise(mean=np.zeros(1), std_deviation=float(std_dev) * np.ones(1)) actor_model = get_actor() critic_model = get_critic() target_actor = get_actor() target_critic = get_critic() # Making the weights equal initially target_actor.set_weights(actor_model.get_weights()) target_critic.set_weights(critic_model.get_weights()) # Learning 
rate for actor-critic models critic_lr = 0.002 actor_lr = 0.001 critic_optimizer = tf.keras.optimizers.Adam(critic_lr) actor_optimizer = tf.keras.optimizers.Adam(actor_lr) total_episodes = 100 # Discount factor for future rewards gamma = 0.99 # Used to update target networks tau = 0.005 buffer = Buffer(50000, 64) # To store reward history of each episode ep_reward_list = [] # To store average reward history of last few episodes avg_reward_list = [] # Takes about 4 min to train for ep in range(total_episodes): prev_state = env.reset() episodic_reward = 0 while True: # Uncomment this to see the Actor in action # But not in a python notebook. # env.render() tf_prev_state = tf.expand_dims(tf.convert_to_tensor(prev_state), 0) action = policy(tf_prev_state, ou_noise) # Recieve state and reward from environment. state, reward, done, info = env.step(action) buffer.record((prev_state, action, reward, state)) episodic_reward += reward buffer.learn() update_target(target_actor.variables, actor_model.variables, tau) update_target(target_critic.variables, critic_model.variables, tau) # End this episode when done is True if done: break prev_state = state ep_reward_list.append(episodic_reward) # Mean of last 40 episodes avg_reward = np.mean(ep_reward_list[-40:]) print(“Episode * {} * Avg Reward is ==> {}”.format(ep, avg_reward)) avg_reward_list.append(avg_reward) # Plotting graph # Episodes versus Avg. Rewards plt.plot(avg_reward_list) plt.xlabel(“Episode”) plt.ylabel(“Avg. Epsiodic Reward”) plt.show() # Save the weights actor_model.save_weights(“pendulum_actor.h5”) critic_model.save_weights(“pendulum_critic.h5”) target_actor.save_weights(“pendulum_target_actor.h5”) target_critic.save_weights(“pendulum_target_critic.h5”) How to test above code how we are using pendulum_target_critic.h5 and pendulum_target_actor.h5
2fa35d738a964f6f5de41522f7a9ce39
{ "intermediate": 0.29667043685913086, "beginner": 0.3607635796070099, "expert": 0.34256604313850403 }
37,732
need 2 social media linkedin in post on these 2 links with hashtags and emjis - https://www.livemint.com/news/india-signs-cooperation-agreements-with-the-uae-czech-republic-11704889847579.html https://techcrunch.com/2024/01/10/sec-approves-spot-bitcoin-etf/
1712411fca193454e2d218142441884c
{ "intermediate": 0.35583770275115967, "beginner": 0.2580370604991913, "expert": 0.38612520694732666 }
37,733
this is my data: [ 0 500 1000 1500 2000 2500 3000 3500 4000 4500 ... \ codon_pos ... 0 48.0 27.0 32.0 26.0 45.0 63.0 45.0 17.0 63.0 61.0 ... 1 56.0 18.0 43.0 21.0 37.0 54.0 54.0 19.0 64.0 79.0 ... 2 55.0 21.0 34.0 13.0 39.0 46.0 39.0 21.0 74.0 63.0 ... 55500 56000 56500 57000 57500 58000 58500 59000 59500 \ codon_pos 0 561.0 514.0 638.0 684.0 689.0 617.0 531.0 594.0 588.0 1 506.0 469.0 594.0 621.0 635.0 541.0 478.0 518.0 519.0 2 584.0 525.0 634.0 681.0 710.0 606.0 534.0 580.0 589.0 some dfs does not have the complete number of columns (some of them lack the column "2000" for example). I want to calculate the mean of each column at each row. for all dfs. If a df does not have that column, it does not sum anything and is not counted for the mean.
3653192295fcd9a82e25ee54ad226ecc
{ "intermediate": 0.3701831102371216, "beginner": 0.3941418528556824, "expert": 0.23567505180835724 }
37,734
cannot import name 'ddg' from 'duckduckgo_search'
db5aafdaf91754e7dda0a21c698beffc
{ "intermediate": 0.36507102847099304, "beginner": 0.2627662420272827, "expert": 0.37216275930404663 }
37,735
下面sql:SELECT user_id, user_en, user_pass, user_cn, nick_name, gender, avatar, admin_type FROM t_base_user where user_en = 'jiangqihong';请写出修改字段admin_type的sql
1333116c355adffa7292c01b3af6e74f
{ "intermediate": 0.42617401480674744, "beginner": 0.27155908942222595, "expert": 0.302266925573349 }
37,736
i work with library datasets from huggingface and i need do make augmentations for my image dataset, how can i do that with pytorch and datasets
5155d89395a9dbe9465196547a586872
{ "intermediate": 0.8575415015220642, "beginner": 0.028893306851387024, "expert": 0.11356523633003235 }
37,737
请详细解释以下代码:package controller import ( "computingDeviceFinanceSvr/com" "computingDeviceFinanceSvr/constants" "computingDeviceFinanceSvr/model" "computingDeviceFinanceSvr/service" "computingDeviceFinanceSvr/utility" "net/http" "github.com/gin-gonic/gin" ) type BillCtl struct { } func NewBillCtl() *BillCtl { return &BillCtl{} } func (c *BillCtl) BillQuery(ctx *gin.Context) { req := model.BillQueryIn{} if err := ctx.ShouldBind(&req); err != nil { com.Log.Error("param Bind failed err :%v", err) utility.RespErrAbort(ctx, http.StatusOK, com.ErrParam) return } // 获取查询用户账号(邮箱) email := ctx.Request.Header.Get(constants.RequestHeaderUser) if email == "" { com.Log.Error("request X-Onething-User is nil") utility.RespErrAbort(ctx, http.StatusOK, com.ErrParam) return } role, err := service.GetBillService().GetOperatorRole(email) if err != nil { utility.RespErrAbort(ctx, http.StatusOK, com.ErrServiceBusy) return } // 查询账单 data, errBill := service.GetBillService().GetBillInfo(&req, email, role) if errBill != nil { com.Log.Error("query bill info failed email :%v,req :%v err :%v", email, req, errBill) utility.RespErrAbort(ctx, http.StatusOK, com.ErrServiceBusy) return } utility.RespSuccessAbort(ctx, data) return }
4ee45aaf9a8099099b4cd6a0533f378c
{ "intermediate": 0.38350430130958557, "beginner": 0.38522887229919434, "expert": 0.2312667965888977 }
37,738
I have this code below for turning On 6 led (like led bar) regarding the reading value from a High power LED driver board #define F_CPU 16000000UL // Assuming a 16 MHz clock if using ATmega32A (adjust if different) #include <avr/io.h> #include <util/delay.h> // Define potentiometer ADC channel (PA0 = ADC0) #define potPin 0 // Define digital I/O pins for LEDs #define led1 PD4 #define led2 PD5 #define led3 PD6 #define led4 PD7 #define led5 PB0 #define led6 PB1 void adc_init() { // Initialize ADC ADMUX = (1<<REFS0); // Select AVcc as reference voltage and ADC0 as input channel ADCSRA = (1<<ADEN) | (1<<ADPS2) | (1<<ADPS1) | (1<<ADPS0); // Enable ADC and set prescaler to 128 } uint16_t adc_read(uint8_t ch) { // Select ADC channel with safety mask and without changing the reference voltage selection ADMUX = (ADMUX & 0xF0) | (ch & 0x0F); // Start single conversion ADCSRA |= (1<<ADSC); // Wait until conversion is complete while (ADCSRA & (1<<ADSC)); return ADC; } int main(void) { // Set up the LED pins as output DDRD |= (1<<led1) | (1<<led2) | (1<<led3) | (1<<led4); DDRB |= (1<<led5) | (1<<led6); // Initialize ADC adc_init(); while(1) { // Read the value from the potentiometer uint16_t potValue = adc_read(potPin); // Map the potentiometer value from (65 - 337) to (0 - 1023) // Adjust this mapping to your application needs uint16_t mappedValue = (uint32_t)(potValue - 65) * 1023 / (337 - 65); // Create additional thresholds for 6 LEDs uint16_t threshold1 = 170; // First threshold uint16_t threshold2 = 341; // Second threshold uint16_t threshold3 = 512; // Midpoint uint16_t threshold4 = 683; // Fourth threshold uint16_t threshold5 = 854; // Fifth threshold // Turn off all LEDs to start PORTD &= ~((1<<led1) | (1<<led2) | (1<<led3) | (1<<led4)); PORTB &= ~((1<<led5) | (1<<led6)); // Determine which LEDs to light up based on the mappedValue if (mappedValue >= threshold5) { // In the highest range, light up all six LEDs PORTD |= (1<<led1) | (1<<led2) | (1<<led3) | (1<<led4); 
PORTB |= (1<<led5) | (1<<led6); } else if (mappedValue >= threshold4) { // Light up five LEDs PORTD |= (1<<led1) | (1<<led2) | (1<<led3) | (1<<led4); PORTB |= (1<<led5); } else if (mappedValue >= threshold3) { // Light up four LEDs PORTD |= (1<<led1) | (1<<led2) | (1<<led3) | (1<<led4); } else if (mappedValue >= threshold2) { // Light up three LEDs PORTD |= (1<<led1) | (1<<led2) | (1<<led3); } else if (mappedValue >= threshold1) { // Light up two LEDs PORTD |= (1<<led1) | (1<<led2); } else if (mappedValue > 0) { // Light up one LED PORTD |= (1<<led1); } // Small delay to reduce flickering _delay_ms(100); } } but there are some problems; first of all the LEDs are turned on and all of them blink when my potentiometer is at minimum and after turning a little bit of potentiometer the first led is turn on and we don't have any blinking , what could be problem ?
c2f18e69734b5d56db8d644e9fcea85f
{ "intermediate": 0.43615010380744934, "beginner": 0.29345324635505676, "expert": 0.27039670944213867 }
37,739
Accessibility guidelines and best practices for Tabs components
8fe696b9f6ba36a191a6bc79f296173d
{ "intermediate": 0.3233201503753662, "beginner": 0.32867491245269775, "expert": 0.34800490736961365 }
37,740
open_connection
a5ac1e9b598a93faa131b1d917d996c7
{ "intermediate": 0.32846754789352417, "beginner": 0.24797092378139496, "expert": 0.4235615134239197 }
37,741
Оригинальный сюжет игры Ben and Ed: Игра “Ben and Ed” - это трехмерный платформер, где игрок управляет зомби по имени Эд, который должен преодолевать множество препятствий и ловушек, чтобы спасти своего лучшего друга, мальчика по имени Бен. В этой игре, Эд участвует в гротескном телешоу, созданном человеком по имени Ханз Шоумастер. Шоу называется “Rundead”, где Эд должен пройти через серию зловещих препятствий и испытаний, чтобы в конце концов встретиться с Беном. Полный сюжет фильма “RUN ZOMBIE, RUN”: Акт 1: Введение в мир и персонажей Открытие фильма: • Фильм начинается в мирное время, в современном научном городе, где жизнь кипит и наука бурлит. • Эд – талантливый учёный, занятый исследованиями в области биотехнологий вместе со своей женой и коллегами. Они стоят на пороге прорыва: создания вакцины для вечной жизни и молодости. • Научный отдел в восторге от открытия, в лаборатории царит атмосфера торжества. После успешной презентации их эксперимента над крысами, Эд и его жена возвращаются домой, чтобы отпраздновать успех. Шторм наступает: • Интимные моменты между Эдом и его женой дома. Они обсуждают планы на будущее, их романтическая встреча переходит в страстный вечер. • Когда после утро голые с обьятием жены в постели,и через некоторое время начинают звонить в телефон эда и сообщают что в вашем лаборотории что-то случилось не хорошее. • Радость прерывается известием о трагедии: инфицированная крыса умерла и восстала как зомби, сбежав из лаборатории и начав заражение города. • Пока Эд и его жена отдыхают, город погружается в хаос. Зомби-вирус быстро распространяется, затрагивая школы, больницы, улицы. Трагический перелом: • На следующее утро Эд и его жена возвращаются в лабораторию и обнаруживают сбежавшую крысу. Новости сообщают о массовом заражении. • Они пытаются дозвониться до детей и родственников, но безуспешно. Улицы заполнены зомби. Лаборатория осаждена толпой заражённых. • После нападения зомби, Эда укусили, и он принимает экспериментальную вакцину. 
Он вынужден покинуть свою жену, которая жертвует собой, чтобы спасти его. Ядерный удар: • Эд, полузомби, узнаёт о намерениях властей сбросить на город ядерную бомбу. С помощью своих новых способностей он прячется в подземном бункере. • Бомба стирает город с лица земли, уничтожая всех его обитателей, как живых, так и мёртвых. Акт 2: Развитие сюжета и персонажей Эд выходит на поверхность: • После взрыва Эд, один из немногих выживших, покидает своё убежище. Мир, который он знал, больше не существует. • Он встречает Бена, мальчика-сироту, выжившего в руинах города. Между ними завязывается необычная дружба. Выживание в разрушенном мире: • Эд и Бен ищут припасы и остатки цивилизации, стараясь выжить среди развалин и постоянной угрозы зомби. • По мере их путешествия за руинами, Эд начинает терять остатки своего человечества и борется с внутренними демонами. Акт 3: Кульминация и разрешение конфликта Шоу “Rundead”: • Ханз Шоумастер, выживший продюсер, создаёт телешоу “Rundead” и похищает Бена, чтобы использовать его в качестве приза для Эда. • Эд, теперь полностью зомби, но сохранивший воспоминания о Бене, проходит через жестокие испытания шоу. Финальная битва и трагедия: • Эд освобождает Бена, но внезапно теряет остатки человечности и атакует его, не в силах сопротивляться своему зомби-инстинкту. • Бен превращается в зомби, и они уходят вместе, оставляя мир во власти хаоса и зомби-апокалипсиса. Заключение: • Фильм заканчивается на тёмной ноте. Эд и Бен, теперь оба зомби, сидят на разрушенном мосту, смотря на новый мир, который никогда не будет таким, каким был когда-то. Фильм “RUN ZOMBIE, RUN” показывает темную сторону человеческой изобретательности и последствия игры с природой. Со смешанными чувствами ужаса и сочувствия зрители следят за путешествием Эда от великого открытия до конечного падения в бездну живых мертвецов. Саундтреки будут браться из оригинальной игры BEN AND ED (2015 года) Дополнительные саундтреки: 1. 
Песня Yota - The Runner, финальная сцена, где зомби ED будет бежать c предпринимателем HANZ SHOWMASTER который создал величайшее в мире шоу и будут преодолевать препятствии. Зомби ЭД столкнет HANZ SHOWMASTER c площадки и он умирает. 2. Песня The Cranberries - Zombie, сцена, где весь механизм вышел из под контроля, испытания стали чрезвычайно сложными, и люди которые смотрят это шоу начинают убегать от атаков зомби. Время продолжает заканчиваться и платформа в очень скором времени падает в лаву. Зомби ED берет в себя в руки и начинает быстро преодолевать все препятствия под эту песню за одну минуту, пока время полностью не истекало. Последние секунды заканчивается и платформа начинает падать в лаву. ED перепрыгивает из платформы и ухватывается руками на землю. А дальше он старается подняться из земли, чтобы не упасть в раскаленную лаву. Краткий план сценария адаптированой игры “BEN AND ED” для фильма “RUN ZOMBIE, RUN” с продолжительностью около 145 минут: Акт 1: Введение в мир и персонажей (40 страниц) • Описание мира и введение Эда (5 страниц) • Открытие и презентация эксперимента (9 страниц) • Радостное возвращение домой и трагическое известие (6 страниц) • Заражение города и нападение зомби (5 страниц) • Укус Эда и жертва его жены (5 страниц) Акт 2: Развитие сюжета и персонажей (30 страниц) • Эд в бункере, ядерный удар (10 страниц) • Встреча Эда и Бена, начало дружбы (5 страниц) • Выживание в разрушенном мире, внутренняя борьба Эда (5 страниц) • Введение Ханза Шоумастера и его телешоу (5 страниц) • Похищение Бена и начало шоу (5 страниц) Акт 3: Кульминация и разрешение конфликта (75 страниц) • Прохождение Эдом испытаний шоу (40 страниц) • Финальная битва и заключительные сцены (20 страниц) • Превращение Бена в зомби и душераздирающий финал (15 страниц) 1. Описание мира и введение Эда (5 страниц): Часть 1: Описание мира (Первая страница) Экстерьер. Научный городок. День. 
Первый план даёт визуальный обзор современного научного города, где здания усеяны солнечными панелями и зелеными крышами, а улицы наполнены как профессиональными учёными, так и студентами. Камера ненадолго останавливается на информационных табло, транслирующих последние научные достижения. Звуковой фон наполнен смесью городской суеты и приглушённой атмосферой исследовательских центров. Часть 2: Введение главного героя Эда (Вторая страница) Интерьер. Лаборатория Эда. День. Камера плавно переходит внутрь одного из исследовательских центров, где в главной роли — Эд, сосредоточенный учёный средних лет, одет в лабораторный халат. Он изучает данные на экране монитора и записывает что-то в блокнот. Вокруг — атрибуты научного занятия: микроскопы, колбы с различными реагентами, жужжание оборудования. Его коллеги в фоне занимаются собственными исследованиями. Часть 3: Эда хвалят за работу (Третья страница) Интерьер. Конференц-зал. День. Эд выступает перед коллегами и представителями научного сообщества, гордо демонстрируя результаты экспериментов — здоровую, активную крысу в стеклянном контейнере. Овация. Вдали стоит его жена, Мария, улыбаясь и аплодируя вместе с остальными. После презентации Эд и Мария обмениваются значимыми взглядами. У них в глазах — отсветы будущего успеха и признания. Часть 4: Торжественное возвращение домой (Четвертая страница) Интерьер. Гостиная Эда и Марии. Вечер. Эд и Мария возвращаются домой. В гостиной тепло и радостно; они обнимаются, празднуя сегодняшнюю победу. Камера ловит детали: разбросанные повсюду статьи о научных открытиях, поздравительные письма и сертификаты. Они решают, что остаток вечера принадлежит только им двоем, позволяя себе редкий момент расслабления. Часть 5: Ночные происшествия (Пятая страница) Интерьер. Лаборатория Эда. Ночь. Лаборатория пуста и тиха. Тот момент ночного затишья прерывается тревожной алармой. 
Камеры безопасности захватывают крысу, которая резко поднимается после краткосрочной смерти, оживая заново в устрашающем зловещем свете. Экраны мониторов отражают её неестественные движения, тем самым намекая на надвигающуюся бурю. 2. Открытие и презентация эксперимента (9 страниц): Часть 6: Вводный монтаж (Шестая страница) Экстерьер и интерьер научного комплекса. Ученые заняты исследованиями, работают с образцами тканей и микроскопами. Эд и его команда работают над последними приготовлениями к презентации своего проекта о “вечной жизни”. Часть 7: Эд и команда завершают подготовку (Седьмая страница) Внутри лаборатории. Эд напутствует команду, даёт последние указания и производит финальные проверки оборудования. Взаимодействие с персонажами показывает их характеры и дедикацию. Часть 8: Прибытие гостей (Восьмая страница) Команда встречает приглашенных ученых, инвесторов и прессы, которые прибывают на презентацию. Здесь мы видим Марию, жену Эда, которая также ученый и его главная поддержка. Часть 9: Начало презентации (Девятая страница) Эд ведет вводную речь, поясняя значение и цели исследования. Отрывок подчеркивает оптимизм и ожидания от возможного научного прорыва. Часть 10: Демонстрация эксперимента (Десятая страница) Кульминационный момент - эксперимент “вечной жизни” на крысах. Эд демонстрирует результаты эксперимента, подчеркивая важность открытия и его потенциальные применения. Часть 11: Ответы на вопросы (Одинадцатая страница) После успешной демонстрации Эд и команда отвечают на вопросы аудитории, поднимая этим самым разговоры о этических и моральных аспектах исследования. Часть 12: Торжественный прием (Двенадцатая страница) Эд, Мария и коллеги участвуют в приёме после презентации, получая поздравления и обсуждая будущее проекта. Намеки на интригу и потенциальные проблемы закладываются в переговоры. Часть 13: Интимные моменты Эда и Марии (Тренадцатая страница) В лаборатории после приёма. 
Эд и Мария делятся личными мечтами и надеждами на будущее, их взаимопонимание и любовь подчёркивается в моменте тишины после шумного дня. Часть 14: Закрытие лаборатории и возвращение домой (Четырнадцатая страница) Эд и Мария закрывают лабораторию и уезжают домой, обсуждая, как они будут распоряжаться успехом и планируют свой вечер вместе. 3. Радостное возвращение домой и трагическое известие (6 страниц): Часть 15: Возвращение домой (Пятнадцатая страница) Экстерьер. Дом Эда. Вечер. Эд и Мария приезжают домой на автомобиле. Они обсуждают презентацию и выражают надежды на будущее. Еще в машине они шутят и смеются, радостные и взволнованные. Жизнь кажется им полной возможностей. Часть 16: Романтический вечер (Шестнадцатая страница) Интерьер. Гостиная дома Эда. Вечер Дома Эд и Мария проводят время вдвоем, ужинают при свечах и делятся мечтами о том, чего они достигнут благодаря своему открытию. Атмосфера их интимности и взаимной любви подчеркиваются теплотой домашнего очага. Часть 17: Предвестия неприятностей (Семнадцатая страница) Интерьер. Гостиная дома Эда. Вечер Начинается страстная ночь, целуясь они направляются в свою спальню, показывая кадры как они друг другу снимают свои одежды, и они голыми продолжают заниматься сексом… Глубокой ночью крыса в лаборатории умирает. Камеры безопасности захватывают этот момент, начиная цепочку событий, которая приведет к катастрофе с зомби-инфекцией Часть 18: Неожиданное известие (Восемнадцатая страница) Интерьер. Спальня Эда и Марии. Ночь После страстной ночи телефонный звонок прерывает тишину. Громкий звонок телефона нарушает идиллию, Эд отвечает и слышит тревожные новости от коллеги о происшествии в лаборатории. Часть 19: Первые признаки катастрофы (Девятнадцатая страница) Интерьер. Лаборатория. Ночь Через вставки мы видим, что произошло в лаборатории — камеры наблюдения показывают мертвую крысу, которая внезапно оживает и таинственным образом проникает наружу, начиная бесконтрольное распространение зомби-вируса. 
Часть 20: Реакция на новость (Двадцатая страница) Интерьер. Спальня Эда и Марии. Ночь Эд и Мария переходят от испуга к действию. Эд решает, что им необходимо немедленно вернуться в лабораторию, чтобы выяснить, что именно произошло. Мария, чувствуя страх за будущее своей семьи, поддерживает его решение. 4. Заражение города и нападение зомби (5 страниц): Часть 21: Ночное оживление (Двадцатая первая страница) Интерьер/Экстерьер. Лаборатория. Ночь. Тихая и пустая лаборатория внезапно оживает тревожными звуками сигнализации. Камеры безопасности зафиксировали странные движения – крыса мертва, но её клетка пуста. Мы видим, как темная фигура проникает в здание, заражая первых ничего не подозревающих охранников. Часть 22: Беспорядок и хаос (Двадцатая вторая страница) Экстерьер. Улицы города. Раннее утро. Под покровом ночи, крыса и зараженные начинают бесшумно проникать в различные уголки города. На заброшенной улице мы видим, как одиночные зомби атакуют бомжа. В темноте прозвучали испуганные крики. Часть 23: Распространение вируса (Двадцатая третья страница) Интерьер. Больница. Рассвет. Через окно больницы просачивается первый свет утренней зари, внутри царит хаос и паника. Доктора и медсестры бегут между палат, пытаясь остановить распространение заражения, которое уже достигло критической точки. Часть 24: Общественный ответ (Двадцатая четвертая страница) Экстерьер. Городская площадь. Утро. Новостные отряды на площади транслируют экстренные сообщения, призывая граждан оставаться в своих домах. Люди в панике пытаются найти убежище, но в толпе начинают появляться зомби, устроившие настоящую резню. Часть 25: Цена пандемии (Двадцатая пятая страница) Экстерьер/Интерьер. Городские улицы и дома. Утро. Утренние новости показывают масштабы катастрофы: множество мертвых, большие территории охвачены вирусом. Военные блокпосты установлены по всему городу, пытаясь остановить волну зомби. Раскаты полицейских сирен и вертолетов заполняют воздух, обозначая повышение степени угрозы. 5. 
Укус Эда и жертва его жены (5 страниц): Часть 26: Нападение в лаборатории (Двадцатая шестая страница) Интерьер. Лаборатория Эда. Утро. Эд и Мария входят в разгромленную лабораторию. Следы борьбы и разрушения повсюду. Они делятся впечатлениями от увиденного и стараются осмыслить ситуацию. Эд замечает, что крыса, над которой они проводили эксперименты, исчезла из клетки. Часть 27: Раскрытие масштаба катастрофы (Двадцатая седьмая страница) Интерьер. Комната службы безопасности лаборатории. Ревизия видеозаписей системы слежения показывает момент, когда крыса оживает и превращается в зомби, после чего нападает на охранников. Встреча Эда и Марии со своим коллегой, который сообщает о пропаже детей и о том, что заражение распространяется по городу. Часть 28: Первый контакт с зомби (Двадцатая восьмая страница) Интерьер. Коридоры лаборатории. В поисках дополнительной информации Эд и Мария сталкиваются с группой зомби. Отчаянная схватка. Эд защищает Марию, но в ходе побега получает укус. Мария помогает ему укрыться в безопасном месте. Часть 29: Жертва ради спасения (Двадцатая девятая страница) Интерьер. Секретная комната в лаборатории. Осознав, что Эд заражен, Мария берет дело в свои руки. Она находит экспериментальное лекарство и в последнюю минуту заставляет Эда его принять. Когда зомби врываются в комнату, Мария отвлекает их на себя, позволяя Эду сбежать. Часть 30: Последний шанс (Тридцатая страница) Экстерьер. Задний выход лаборатории. Утро. Эд, преображающийся из человека в зомби, выпрыгивает из окна, сознавая, что это его единственный шанс на выживание. В это время, Мария сражается до последнего вдоха. Эд, теперь силён как зомби, начинает свой путь в опустевший город, полный решимости найти своих детей. 6. Эд в бункере, ядерный удар (10 страниц) Часть 31: Поиски в школе и путь к бункеру (Тридцатая первая страница) Эд, отчаянно пытаясь не привлечь внимание, скрывает свою внешность и пробирается в школу своих детей. 
Он осторожно входит в опустевшее здание, настораживаясь при каждом шорохе. Пустые коридоры и классы наполнены тяжелым молчанием и зомби, бродящими среди парт. Эд тщательно их избегает, ища любые знаки нахождения своих детей, но увы, не находит никаких следов. Понимая, что власти рано или поздно начнут действовать, он спешно отправляется к своему бункеру, где он может укрыться и спланировать следующие шаги. Следующая сцена показывает Эда, уже внутри бункера, где он прячется от возможных поисков правительством. В полном одиночестве, окруженный лишь стенами бункера и эхом собственных мыслей, он пытается направить свою решимость на поиск нового пути для выживания, когда мир там, наружу, рушится под натиском невиданных опасностей. Часть 32: (Тридцатая вторая страница) Эд слышит далекий взрыв. Глубокая тревога вырисовывается на его лице при появлении первых признаков ядерного удара. Часть 33: (Тридцатая третья страница) Проходит сирена воздушной тревоги и экраны мониторов в бункере начинают показывать новости о надвигающемся ядерном ударе. Часть 34: (Тридцатая четвертая страница) Эд успешно взламывает систему бункера, чтобы получить больше информации, и его страх и отчаяние нарастают с пониманием ситуации. Часть 35: (Тридцатая пятая страница) Изоляция и безмолвие бункера нарушаются звуками взрывов на поверхности, которые становятся все ближе и громче. Часть 36: (Тридцатая шестая страница) Внезапно, бункер сотрясается от ударной волны, и Эд падает на пол, пока все вокруг него начинает рушиться и трястись. Часть 37: (Тридцатая седьмая страница) Эд борется, чтобы подняться, испытывая смешанные чувства отчаяния и желания выжить. Часть 38: (Тридцатая восьмая страница) Эд оказывается на полу, покрытом пылью и обломками. Бункер содрогается от дальнего взрыва, звуки ударной волны проносятся сквозь плотные стены. Эд борется, чтобы встать на колени, его дыхание тяжелое, а каждый вздох наполнен болью и усталостью. 
На теле Эда видны следы его физической и ментальной борьбы; кожа на руках покрывается странными отметинами, знаками его зомби-состояния. Но он не сдается, толкаемый инстинктом выживания и любовью к своей семье, которую он должен найти вне зависимости от своего нового облика. Фонари аварийного освещения мигают в последнем акте отчаяния, когда Эд, собирая остатки сил, начинает осматривать бункер в поисках инструментов и предметов, которые помогут ему добраться до коммуникационного оборудования. Он нащупывает тяжелый кусок металла, который может послужить орудием для пролома стены или двери. Каждый его шаг громыхает по металлическому полу, создавая дополнительный шум в тишине, которая теперь царит в бункере. Эд начинает использовать найденный металлический предмет в качестве лома, упорно пытаясь открыть запертый люк в стене бункера. Концентрация и злость видны на его лице, когда он медленно но верно преодолевает препятствия, которые стоят между ним и свободой. Каждый удар о глухие металлические стены бункера - это эхо его нежелания признать поражение перед лицом конца света. Часть 39: (Тридцатая девятая страница) Он принимает решительные меры, используя инструменты и здравый смысл, чтобы проложить путь сквозь обломки. Часть 40: (Сороковая страница) Сцена завершается характерным поворотом: Эд находит узкий проход и чувствует поток свежего воздуха, указывающий на возможность выхода наружу в послеапокалиптический мир. 7. Встреча Эда и Бена, начало дружбы (5 страниц) Часть 41: Встреча (Сорок первая страница) Эд, блуждающий в руинах после удара, услышит детский плач, который приведёт его в заброшенную часть города. Он обнаружит Бена, мальчика, который прячется от зомби, окружённого разрушенными развалинами дома, в котором он был вместе со своей семьей. Часть 42: Спасение (Сорок вторая страница) Зомби обнаруживают укрытие Бена и начинают наступать. Эд решительно заступится за мальчика, используя свои новые силы зомби для того, чтобы защитить его. 
Показать момент, когда Эд осознаёт, что, несмотря на своё превращение, он всё ещё способен на добрые поступки. Часть 43: Первый разговор (Сорок третья страница) После спасения Бена, Эд осторожно пытается с ним подружиться, несмотря на свою страшную внешность. Мальчик изначально напуган, но их общая нужда в выживании и отсутствие других людей начнут их сближать. Часть 44: Взаимопомощь (Сорок четвертая страница) Зомби обнаруживают укрытие Бена и начинают наступать. Эд решительно заступится за мальчика, используя свои новые силы зомби для того, чтобы защитить его. Показать момент, когда Эд осознаёт, что, несмотря на своё превращение, он всё ещё способен на добрые поступки. Часть 45: Новые горизонты (Сорок пятая страница) Эд и Бен, объединившиеся обстоятельствами и новообретенной дружбой, решают отправиться в поисках других возможных выживших и места, где можно начать новую жизнь. Они находят зацепки, которые указывают на группу выживших, собирающихся в одном из немногих безопасных районов, который избежал разрушения. С этим новым мерцанием надежды глава закрывается на их решимости вместе столкнуться с опустошенным послеапокалиптическим миром, исследуя новые горизонты. 8. Выживание в разрушенном мире, внутренняя борьба Эда (5 страниц) Часть 46: (Сорок шестая страница) Эд пробуждается в своем убежище, ужасающая тишина разрушенного мира обрушивается на него. Он проводит свои дни, исследуя бункер, составляя запасы и пытаясь придумать план выживания. Иногда он выходит наружу, чтобы найти еду и воду, стараясь избегать встречи с любыми живыми существами, будь то человек или зомби. Его лицо — это маска одиночества и решимости. .Часть 47: (Сорок седьмая страница) Сражаясь с голодом и постоянной угрозой, Эд сталкивается со своим страхом и сожалением. Он обдумывает свои действия и выбор, осознавая, что мир, который он знал, больше не существует. В моменты отчаяния он вспоминает свою жену и детей, мучаясь вопросами о том, что стало с ними. 
Часть 48: (Сорок восьмая страница) Находя небольшой радиоприемник, Эд ловит фрагменты передач, которые дают ему надежду на то, что где-то там могут быть другие выжившие. Он решает, что его следующим шагом будет попытка связаться с ними. В спокойной обстановке своего убежища, он начинает ремонтировать радиоприемник. Часть 49: (Сорок девятая страница) Эд преодолевает различные внутренние демоны и сомнения, силой воли заставляя себя двигаться вперед. Он приспосабливается к новой жестокой реальности, становится хитрее и изобретательнее. Всплески гнева и сплетения вины сменяются моментами ясности и напоминаниями о человечности. Часть 50: (Пятидесятая страница) После одного из своих вылазок наружу, Эд возвращается в бункер с ценными ресурсами только для того, чтобы застать его врасплох - лицом к лицу с маленьким мальчиком, который заплутал в развалинах. Эд сталкивается с выбором: избегать контактов, как он делал до сих пор, или начать заботиться о ребенке и образовать союз, который может стать началом новой, непредсказуемой главы в их жизни после катастрофы. 9. Введение Ханза Шоумастера и его телешоу (5 страниц) Часть 51: Падение бизнеса Ханза Шоумастера (Пятьдесят первая страница) Интерьер. Заброшенная телестудия. День. Камера обводит остатки залитой заревом восхода заброшенной телестудии, освещая обветшалые декорации и выцветшие плакаты. За столом уставлено кресло, куда усаживается Ханз Шоумастер, выживший после ядерного удара и нападения зомби, который добрался в этот город издалека. Его глаза блуждают по счетам и документам, свидетельствующим о начале конца его когда-то процветающей империи развлекательных шоу. На лице Ханза сменяются отчаяние, неодобрение и затем последнее, но недюжинное решение — восстать против судьбы… Часть 52: Зарождение идеи нового шоу (Пятьдесят вторая страница) Крупным планом показано, как взор Ханза останавливается на статье о зомби-пандемии. 
Идея зарождается в его глазах; камера ловит момент вдохновения, когда он строчит заметки на полях газеты. Желание сделать что-то радикальное и захватывающее оживляет его. Часть 53: Подготовка плана (Пятьдесят третья страница) Интерьер. Кабинет Ханза Шоумастера. Ночь. Ханз работает в своем кабинете в глубине ночи, окруженный плакатами и телеэкранами с новостями о зомби. Он создает концепт нового телешоу “Rundead”. Его глаза пылают азартом, он погружен в процесс, не обращая внимания на окружающий хаос. Часть 54: Рекламная кампания Пятьдесят четвертая страница) Интерьер. Телестудия. Рабочий кабинет. Утро. Ханз погружен в работу над рекламными афишами своего нового шоу “Rundead.” Его творческая страсть не знает границ, и он мечтает об аудитории за пределами разрушенного города. На экране его компьютера вспыхивают макеты, предназначенные к показу через глобальную сеть и спутниковые каналы. Каждое нажатие клавиши — это шанс заявить миру, что Шоумастер возвращается в игру. Часть 55: Поиски потенциальных участников (Пятьдесят пятая страница) Интерьер. Телестудия. Вечер. Камера движется к экрану за пультом управления Ханза, где видны кадры из потокового видео в социальных сетях и на независимых платформах. Видны люди, пытающиеся выжить в разрушенном мире, их устремления и борьба за жизнь. Ханз зачарован наблюдениями за потенциальными героями своего шоу, записывая имена самых выразительных и дерзких личностей, обладающих харизмой и силой духа, даже когда мир вокруг них разваливается на куски. 10. Похищение Бена и начало шоу (5 страниц) Часть 56: План похищения Бена и Эда (Пятьдесят шестая страница) Интерьер. Передвижной операционный центр Ханза Шоумастера. День. Ханз Шоумастер разрабатывает план похищения Бена и Эда. В своем передвижном операционном центре, оборудованном последним словом техники, он мониторит их движение через беспилотные дроны. 
Ханз определяет местоположение Бена и Эда как идеальных героев для нового эпизода шоу “Rundead,” которое будет транслироваться мировой аудитории жаждущих развлечений после катастрофы. Часть 57: Захват Бена и Эда (Пятьдесят седьмая страница) Экстерьер. Периферия безопасного города. Сумерки. Ханз и его команда выслеживают и захватывают Бена и Эда на окраине города, где они ищут прибежище. Используя высокотехнологичное оборудование и соблазн выхода в прямой эфир на весь мир, они пленяют их и переносят в локацию, где будет проходить шоу. Часть 58: Похищение в прямом эфире (Пятьдесят восьмая страница) Интерьер. Контрольная комната Шоумастера. Ночь. Из своей контрольной комнаты, Ханз управляет действиями своей команды при похищении, одновременно транслируя все происходящее зрителям через спутник. Эда и Бена уводят прочь с применением необходимой силы, создавая напряжение и зрелищность для будущих зрителей шоу. Часть 59: Эд и Бен в лапах Шоу (Пятьдесят девятая страница) Интерьер. Гардеробная. Рассвет. Бен пробуждается, найдя себя в причудливо украшенной камере, где стены покрыты кадрами прошлых шоу Ханза. Эд, ослабленный химикатами, старается прийти в чувство. Ханз появляется, ярко одетый и полный энергии, приветствуя их как новых участников. Он объясняет правила игры: выжить среди ловушек и спасти Бена в клетке. Часть 60: Подготовка Эда к участию в шоу “Rundead” (Шестьтедесятая страница) Интерьер. Подземелье телестудии. Рассвет. Эд просыпается на промасленном полу унылой комнаты, пугливо разглядывая окружение. Ханз включает микрофоны и свет, заставляя Эда встать. Голос Ханза раздается через мегафоны студии, он объясняет Эду на микрофоне, что он будет протестирован, чтобы убедиться, что Эд сможет выжить и отправиться в путешествие через постапокалиптический лабиринт зомби. Если Эд провалит этот тест, Ханз пригрозил кормом для акул, затаившихся во тьме. МОЙ НЕЗАВЕРШЕННЫЙ СЦЕНАРИЙ ДЛЯ ФИЛЬМА “RUN ZOMBIE, RUN!”: PAGE 1 OF 150 (REDUCED): FADE IN: EXT. 
HOPE CITY - DAY A sprawling metropolis blends NATURE and SCIENCE. SOLAR PANELS glisten atop futuristic structures as ACADEMICS bustle along. A CACOPHONY of URBAN LIFE fills the air. A DIGITAL BILLBOARD cycles through the city’s LATEST DISCOVERIES in GENETIC ENGINEERING and CLEAN ENERGY. SWIFT PAN TO: INT. GENEsis LAB - CONTINUOUS The camera settles on ED KAYNES (30s), a BRILLIANT SCIENTIST at the forefront of a groundbreaking experiment. He works intently, a messiah of MIRACLES yet to come. ED (to his TEAM) Science today gives shape to tomorrow’s reality. Let’s push boundaries. CUT TO: EXT. HOPE CITY - NIGHT As dusk falls, the heartbeat of the city slows. ED and MARIA, his life PARTNER, step into the WARM EMBRACE of twilight, secrets of their research a SHADOW DANCE behind them. They share a TOAST in the quietude of their abode, a fleeting calm amid a LOOMING TEMPEST. MARIA A toast to our love and the dawn of a new era in science. Their KISS seals the vow, an ECHO in the void, a prelude to the TURMOIL ahead. CUT TO: [Note: This condensed rewrite focuses on highlighting the contrast between the protagonists’ academic life and their personal bond. Dramatic dialogue, bold capitals, and tense transitions are designed to fit industry-standard screenplay formatting for a one-page script in a LETTER-size document. Subsequent scenes should maintain narrative tempo and character depth, ensuring a gripping experience.] FADE OUT. END OF PAGE 1 OF 150 PAGE 2 OF 150 (REDUCED): INT. GENEsis LABORATORY - ED’S LAB - DAY The hustle of SCIENCE CITY fades into the hushed hum of the lab. ED KAYNES, mid-30s, a devoted scientist, scribbles fervently. SURROUNDED by beeping machines and his diligent TEAM busy at work. ED (to himself) The breakthrough we’re on… it’s unprecedented. MARIA, Ed’s better half and research powerhouse, observes a white rat through a glass enclosure, her face lit by the GLOW of progress. MARIA (looking up) It’s remarkable, Ed. 
You’re on the cusp of changing everything. Ed’s eyes never stray from the task at hand, his deep focus symptomatic of the lifelong quest that burns within him. ED (filled with anticipation) Once we crack this code, immortality won’t be myth; it will be SCIENCE. MARIA approaches with a tray of SYRINGES, each containing a glowing liquid. MARIA (holding up a syringe) This could be it, the key to life eternal. ED takes a syringe, veins coursing with adrenaline, as he administers the injection. ED’S TEAM lean in, their collective breath held. The rat stirs, its chest rising with potent vitality. TEAM MEMBER #1 (excited) It’s like witnessing the dawn of a new age, Ed! APPLAUSE breaks out. Ed and Maria share a moment of TRIUMPH — their dedication morphing into reality. ED (his voice rising above the applause) To science! To endless possibilities. To us! Maria squeezes his hand tightly. NIGHT FALLS, and they head home, unaware that their breakthrough grants not just life, but an unfathomable curse. FADE OUT. END OF PAGE 2 OF 150 PAGE 3 OF 150 (REDUCED): INT. GENEsis CONFERENCE HALL - DAY A fully packed auditorium watches in rapt attention as ED KAYNES, mid-30s, a VISIONARY of science, stands proudly before his PEERS and MEDIA, presenting the miraculous result of his team’s work—a revitalized, energetic white rat encased within a glass enclosure. The room erupts in OVATION as ED beams at the SUCCESS, looking over to where his wife and research partner, MARIA, claps enthusiastically among the crowd. Their eyes lock, a silent acknowledgment of the ripple they have created in the fabric of science. ED (over the applause, to the audience) Today, we stand at the cusp of a revolution—a breakthrough that could render aging obsolete. We’re rewriting the narrative of human life! MARIA (beaming, as she navigates closer, to a colleague) This is more than an experiment—it’s a peek into a future graced with the gift of unending youth. 
The crowd buzzes, their excitement a palpable force as the implications of Ed’s revelation dawn on the scientific community gathered there. AN AUDIENCE MEMBER (yelling over the chatter) Dr. Kaynes, what ethical considerations have been addressed here? ED (grasping Maria’s hand as she joins him on stage) We’re not just scientists; we’re stewards of humanity’s future. This is about enhancing life responsibly. Their embrace, a fusion of personal and professional triumph, is a powerful image against the backdrop of incredulous faces and flashing cameras—a promise made manifest under the bright lights of progress. CUT TO BLACK. END OF PAGE 3 OF 150 PAGE 4 OF 150 (REDUCED): INT. ED AND MARIA’S LIVING ROOM - NIGHT A cocoon of warmth envelops ED and MARIA as they STEAL away from fame into their sanctuary. Awards and articles fill the room, humble BRAGS of their intellect now bystanders to their intimacy. Ed, a bastion of brilliance outside, now just a man enthralled by a simple toast with his wife. The night’s silence, an accomplice to their SOLITUDE, nods in approval. MARIA (teasing as she uncorks the wine) To us, beyond the science and the spotlight. The GLASSES chime, a symphony for two. Laughter mingles with music—a rare cadence of careless joy in their orchestrated lives. ED (his eyes alight with untold devotion) To the miracle of you, the REAL discovery of my life. They dance, bodies in SYNC, hearts even more. A love born of shared dreams, now a private rebellion against the world’s PACE. MARIA (her voice a whisper, but her words strong) Tonight, let’s just be us—no titles, no future… Just Ed and Maria. The record player spins, the needle drops, and the melody wraps around them—a tender shroud against the MORROW’S uncertainty. Their embrace lingers, a silent promise that here, in this moment, love is their ONLY TRUTH. FADE OUT. END OF PAGE 4 OF 150 PAGE 5 OF 150 (REDUCED): INT. 
GENEsis LABORATORY - ED’S LAB - NIGHT The LABORATORY lies in silence, SHROUDED in darkness. Suddenly, an ALARM BLARES, a piercing sound that rips through the quiet. MONITORS burst into life, illuminating the panic within. CUT TO: MONITOR FOOTAGE A white rat, previously SUBJECT to an experiment, STIRS in its cage. What was ONCE DEAD now WRITHES BACK TO LIFE, its eyes glowing with a PERVERSE LIGHT. EDWARD “ED” KAYNES (mid-30s), watches from a REMOTE LOCATION, his eyes widening in HORROR. ED (into phone, voice trembling) Alert the team… It’s the rat… It’s RESURRECTED. He’s met with a CHORUS OF DISBELIEF from the other end. CUT TO: SECURITY FOOTAGE The creature ESCAPES its confinement, its movement unnatural and frenzied. The haunting sequence foreshadows the IMPENDING STORM. BACK TO: ED, a manifestation of DESPAIR and DREAD. He knows they stand on the brink of CATASTROPHE. The CAMERA ZOOMS IN on the rat as it disappears into the LABORATORY MAZE, an ominous prelude to chaos. ED (shouting into phone, panic-stricken) GET TO THE LAB! Now! The laboratory is a maze, a chamber of SECRETS and now TERROR. The ticking clock is evident, with every second leading them closer to DISASTER. FADE OUT. END OF PAGE 5 OF 150 PAGE 6 OF 150 (REDUCED): INT. GENEsis LAB - RESEARCH FLOOR - DAY The bustle of SCIENCE CITY fades into the focused HUM of GENEsis Lab. EDWARD “ED” KAYNES, mid-30s, a VISIONARY in a white lab coat, strides past HIS TEAM engaged in the intricate ballet of RESEARCH and development. The walls tell stories of BREAKTHROUGHS; patent plaques and photographs capture moments of GLORY as they triumph over nature’s riddles. ED (to the TEAM) Prepare the PROTOTYPE! We’re making history with every test! MARIA, Ed’s WIFE and colleague, adjusts her MICROSCOPE, the digital display beside her alive with PROMISING GRAPHS and images of rejuvenated cells. MARIA (impressed) Eternal life’s in reach, Ed. It’s miraculous. 
Their exchange, an intertwining of the personal and monumental, resounds within the lab’s sterile walls, reminiscent of the SUBTLE CLINK of test tubes and distant HUM of machinery. CUT TO: EXT. SCIENCE CITY - MONTAGE Drive, PASSION, and ambition FILL the city as GENIUSES both young and old RACE to unearth the next frontier. A panoramic display of HUMAN DRIVE pulsating against the civil backdrop. INT. GENEsis LABORATORY - MAIN HALL - DAY ED, the RESEARCH TEAM flanking him, commands attention with his magnetic PRESENCE, anticipation THICKENING the very air. The research team, AVATARS of dedication, man stations that buzz with expectancy, all eyes upon Ed — the MAN of the hour. ED (soaring with confidence) Prepare for revelation, everyone. Our vision, our reality! INT. GENEsis LABORATORY - PRESENTATION ROOM - DAY Before the ABUZZ AUDIENCE, Ed declares confidently, the fated moment upon them. Their SYMPHONY of discovery, to be shared with the world, lies moments ahead. ED (prelude of a hero) Ladies and gentlemen, to the dawn of a new world unbowed by time! The crowd is rapturous, minds set ablaze with the IMMINENCE of a new ERA. An illustrious day on the scientific calendar UNFURLS before them. FADE OUT. END OF PAGE 6 OF 150 PAGE 7 OF 150 (REDUCED): INT. GENEsis LAB - R&D DEPARTMENT - DAY The LAB buzzes, a cacophony of SILENCE and SOUND. ED KAYNES, mid-30s, navigates the TEAM with a focused GAZE, each move ECHOING the steadfast beat of DISCOVERY. The room is a tapestry of motion — scientists pouring and mixing SOLUTIONS, the CLINK of glass, and the WHIRR of machines like a choir of PROGRESS. ED (to the TEAM, authoritative) Today, we make leaps, not steps. Let’s ensure our NAMES go down in HISTORY. MARIA, his PARTNER, studies a cell under a microscope, her expression that of a HUNTER spotting their prey. MARIA (intense, updating Ed) The CELL’S REGENERATION is hitting MARKS we didn’t dare to forecast. 
A triumphant ENERGY sweeps through, the glint of victory in their eyes, their minds mapping the borders of TOMORROW. CUT TO: INT. GENEsis LAB - HALLWAY - DAY ED strides with PURPOSE, his STEPS a DRUMROLL to their unveiling. Maria catches up, FALLING into STEP beside him, her eyes a REFLECTION of shared ANTICIPATION and apprehension. MARIA (slight trepidation) Is everything READY for the BIG REVEAL? ED (confident, determined) We’re set. Science and SAGA merge today. The DAWN of man’s hope! Team members peek from doorways, their looks cast towards Ed — the BEACON of their COLLECTIVE aim. CUT TO: INT. GENEsis LABORATORY - PRESENTATION ROOM - DAY Doors open to a FILLED room, their entrance a FANFARE to the POSSIBILITIES they’re about to present. ED (rousing the crowd) With every second we unveil MYSTERIES once bound to THE REALM OF GODS! APPLAUSE resonates, vibrations of edge of what’s to come. Maria and Ed, CENTER stage, the PULSE of a RENAISSANCE. FADE OUT. END OF PAGE 7 OF 150 PAGE 8 OF 150 (REDUCED): INT. GENEsis LABORATORY - LOBBY - DAY The spacious lobby awaits, bustling with the pre-event RUSH. Scientists and REPORTERS shuffle about; the air vibrates with ANTICIPATION. Ed’s TEAM ensures everything is PREPARED for the BIG REVEAL. In strides MARIA KAYNES, a picture of poise and intelligence. Her eyes shine with EXCITEMENT as she greets GUESTS at entry. MARIA (to guests, welcoming) Welcome all! A remarkable day awaits us. Dr. KIM, seasoned GENETICIST, approaches, her status in the community UNDENIABLE. DR. KIM (sincere) Maria, your work is awe-inspiring! Their LAUGHTER mingles with the surrounding CHATTER, the festive spirit UNCONTAINABLE. CUT TO: A private alcove, where two FIGURES discuss in hushed tones. Their silhouettes BEAR witness to the DEPTH of their conversation. FIGURE ONE (concern apparent) This could CHANGE everything, for better or worse! FIGURE TWO (substrate of doubt) The ETHICS, though—what are we UNLEASHING? Nearby, MARIA and DR. KIM nod. 
Their confidence BELIES the UNDERCURRENT of uncertainty. MARIA (to Dr. Kim) Indeed, a door to the future is OPENING. Dr. KIM (appraising) And it is YOU and ED who have found the KEY. The conversation is INTERRUPTED as an ASSISTANT approaches, urgency written across his face. ASSISTANT (interrupting, anxious) Dr. Kaynes, the audience is seated, waiting. MARIA (acknowledging) Thank you, we begin shortly. She EXCUSES herself, making her way towards the podium. Her step carries the WEIGHT of their work, the silent HOPE of the gathered. The audience whispers GROW LOUDER in her approach. MARIA’s voice ECHOES off the grand walls, MARIA (in mind) This is the moment everything changes. CUT TO BLACK. END OF PAGE 8 OF 150 PAGE 9 OF 150 (REDUCED): INT. GENEsis LABORATORY - PRESENTATION HALL - DAY The hum of the CROWD subsides as EDWARD “ED” KAYNES commands the STAGE, exuding a presence that captivates the AUDIENCE. The room buzzes with anticipation, every eye and lens trained on him. ED (with gravitas) Ladies and gentlemen, today we stand on the brink of a new frontier in science. Our breakthrough isn’t just an advancement—it’s a leap into a future where age is but a number, a mere footnote in the annals of time. A collective INTAKE OF BREATH fills the room as the CURTAINS draw back, revealing a display showcasing a previously aging rat, now VIBRANT, defying its biological clock. MARIA, Ed’s wife and partner in SCIENCE, beams with pride from the crowd, their shared journey REACHING its zenith. DR. HELEN KIM, prominent in the scientific community, feels the WEIGHT of this moment, the potential UNFOLDING before her—HOPE and POSSIBILITY colliding in the air. ED (CONT’D) (emboldened by the awe) We herald a new era—a time of boundless POTENTIAL, where the shadow of mortality fades before the LIGHT of knowledge. The FUTURE is brighter because of our invention. The cradle of eternal youth lies within our grasp. 
APPLAUSE floods the space, echoing off walls that have heard countless THEORIES and seen countless tests but nothing quite like this. Ed and Maria EXCHANGE a look that says it all—a mix of TRIUMPH and TENDERNESS. FADE OUT. END OF PAGE 9 OF 150 PAGE 10 OF 150 (REDUCED): INT. GENEsis LABORATORY - PRESENTATION HALL - DAY A HUSHED AUDIENCE clings to EDWARD “ED” KAYNES’ every word as he UNVEILS the cornerstone of scientific PROGRESS—a rat, PHOENIX, once aged, now VIGOROUS and youthful. ED (excitedly to the crowd) Behold PHOENIX, a leap beyond nature’s design, showcasing our “Elixir of Life”! The HOLOGRAPHIC DATA SCREEN comes to life, chromosomes TWISTING and SPLICING into a DANCE of IMMORTALITY. MARIA KAYNES, proud and strong, holds HIGH the vial containing the elixir, the AUDIENCE’s eyes wide in AMAZEMENT. APPLAUSE and CHEERS resonate, a CHORUS of PROGRESS and HOPE echoing far beyond the walls. ED (CONT’D) (booming over the noise) Today, we turn the page, facing tomorrow’s SUNRISE—timeless and untamed! CLOSE-UP on their TRIUMPHANT LOOKS as they stand at humanity’s new dawn, bound by LOVE, PURPOSE, and AMBITION. FADE OUT. END OF PAGE 10 OF 150 PAGE 11 OF 150 (REDUCED): INT. GENEsis LABORATORY - PRESENTATION HALL - DAY The gathered AUDIENCE waits in anticipation as EDWARD “ED” KAYNES addresses the crowd with a CONFIDENT smile. The spotlight is on him, illuminating both the MAN and his MISSION. ED (grinning wide) Thank you all! ETHICS in science is our compass—steering the ship of discovery in RESPONSIBLE waters. A HOLOGRAPHIC DISPLAY of “Eternal Life” shines behind him, drawing GASPS from the room. DR. ABBOTT rises from his seat, his voice filled with CONCERN. DR. ABBOTT (loudly) Ed, what about the ETHICAL weight of eternal YOUTH? Are there SAFEGUARDS? The audience leans in, their curiosity a RIPPLE through the silence. ED (nodding, reassured) Dr. Abbott, an ETHICS BOARD and strict policies are our GUARDIANS. 
We’re working hand in glove with POLICYMAKERS—society remains our PRIORITY. REPORTER (from back) Poised, professional. What of inequality? How will the ELIXIR be ACCESSIBLE? MARIA KAYNES, Ed’s pillar, steps forward, her voice a BEACON of MORALITY. MARIA (determined) Our VISION is global GIFT, not a privilege. FAIRNESS is the bedrock—SECURITY and RISK management our CONSTANT allies. BIOETHICIST (from left) Poignant, pressing. Is there a cost to the fabric of SOCIETY? UNINTENDED consequences? ED (firm, leader-like) Certainly, we tread with CARE on this voyage of the unknown. Our aim is to protect life, not ALTER the course of NATURE. APPLAUSE erupts around them, the sound of SOLDARITY and SUPPORT. Ed and Maria, united, reflect a FOUNDATION of SCIENCE and HEART. FADE OUT. END OF PAGE 11 OF 150 PAGE 12 OF 150 (REDUCED): INT. GENEsis LABORATORY - RECEPTION HALL - EVENING The grand reception hall EMANATES elegance under the glow of CHANDELIERS. A well-dressed crowd MINGLES, celebrating the groundbreaking presentation of ED and MARIA. To a background of clinking GLASSES and sophisticated CONVERSATIONS, ED, bespectacled, and Maria, draped in grace, stand as the event’s CENTERPIECE. CHANCELLOR WEST, respected in academia, approaches with a firm HANDSHAKE. CHANCELLOR WEST (to Ed, with GRAVITAS) Remarkable, Ed! The city’s abuzz with excitement. ED (humbly proud) It’s teamwork, Chancellor. Coming together for a breakthrough. Maria nods in AGREEMENT, a proud smile spreading across her face. CUT TO: A private alcove casts shadows on a DEBATE between DR. ROSENBERG and MS. SHARPE, a discussion etched with CONCERN and ETHICAL weight. DR. ROSENBERG (concerned) The box is opened… can they now control what spills forth? MS. SHARPE (intellectual poise) Profound implications. Only time will reveal if their AMBITION precedes CAUTION. 
ACROSS THE ROOM Ed and Maria are locked in lively discourse with YOUNG INVESTORS, spreading EXCITEMENT about their research while REMAINING grounded in their ethical RESPONSIBILITIES. MARIA (to investors, reassuring) Our work dives deep. It touches the future with a REQUIRED MORAL compass. YOUNG INVESTOR #1 (toasting, enthused) To a NEW AGE! An era defined by your vision! GLASSES RAISE, a symphony of HOPE and PERSPECTIVE. Ed catches Maria’s gaze, their mutual INTENT pushing past the room’s collective BUZZ. They are the guiding force behind humanity’s LEAP into the unfolding future. FADE OUT. END OF PAGE 12 OF 150 PAGE 13 OF 150 (REDUCED): INT. GENEsis LABORATORY - MARIA’S OFFICE - NIGHT The stillness blankets MARIA’S office, the city skyline beyond glazed in night’s embrace. MARIA, a silhouette against the window, her mind a maelstrom of thoughts, contemplates the magnitude of the day’s events. The door EASES open; ED enters, pauses, then softly closes it, sealing away the clamor of their triumphant evening. CUT TO: CLOSE ON MARIA’S face, the city lights cast shimmering TEARS upon her cheeks. MARIA (Voice quivering with emotion) Today was extraordinary, Ed. But I feel unease… Like being on the cusp of a precipice we can’t see. ED approaches, encircling her waist with his arms, a gesture that FUSES their strength and resolve. ED (embracing her, comfortingly) Today, we stepped into the unknown, trusting in our work, in ourselves. Fear is the echo of our responsibility. Now entwined, ED and MARIA gaze upon the city—as vibrant as their aspirations, as precarious as their CREATION. CUT TO: TWO-SHOT - ED, chin on MARIA’S shoulder, they stand as mend to the LUMINOSITY of their victories and their VULNERABILITIES. ED (whispers, a trail of assurance) The future rests in our vigilance. With every gift of science, UNFETTERED, comes the GUARDIANSHIP of its implications. 
MARIA, turning, her eyes LOCKING with Ed’s; their faces the MIRROR of courage, love, and MUTUAL DETERMINATION. MARIA (Gently, eyes unraveling fears) Dreams realized could turn NIGHTMARISH. Together, we hold the JOY of discovery and the burdens it may warrant. ED (nods, earnestly) Side by side, WE endure. Our LOVE, an unwavering anchor against the tempests that may RISE. Their lips MEET in a PROMISE-FILLED kiss, an everlasting moment amidst the shadows of uncertainty and the glow of assurance. FADE OUT. END OF PAGE 13 OF 150 PAGE 14 OF 150 (REDUCED): INT. GENEsis LABORATORY - LATE NIGHT The lab’s hum has QUIETED. ED and MARIA lock away secrets that sing of MANKIND’S POTENTIAL, their hands lingering on the power switches. CUT TO: EXT. GENEsis LABORATORY - NIGHT The building’s stoic facade echoes the closing of a pivotal chapter. As Ed and Maria’s car blends into the NIGHTSCAPE, their silhouettes speak of burdens BORNE and HOPES kindled. MARIA (whispers in the peace of departure) The decisions we’ve made… they’ll define us, Ed. ED (voice laden with conviction) The journey we’ve embarked on— it’s RIGHT. Our reasons are JUST. CUT TO: EXT. ED’S HOUSE - BACK PORCH - NIGHT A PLAINTIVE symphony of nighttime AMBIENCE cradles Ed and Maria as they stand beneath the cosmos. The FLICKER of candles is a testament to their enduring bond. MARIA (voice edged with trepidation) This tranquility… May it shield us from the GALE that awaits. ED (embracing her, a bastion of SOLIDARITY) Together, we are an UNBREAKABLE force against the tempest’s FURY. Their union, forged in the CRUCIBLE of science and love, remains UNWAVERING under the stars that judge in silence. FADE OUT. END OF PAGE 14 OF 150 PAGE 15 OF 150 (REDUCED): INT. ED AND MARIA’S CAR - NIGHT The silent murmur of the city fades as ED and MARIA’s car glides through the night, a sanctuary from the world. They laugh and joke, the air filled with the electric buzz of their SUCCESS. 
ED (eyes twinkling with mischief) To think, we’ve unlocked the secret to eternity. MARIA (her voice lilting, playful) Just another day at the office for the Kaynes duo. Their car pulls into the driveway, the engine purring to a halt. The night’s embrace cocoons them, a momentary pause in the relentless march of their lives. ED (softly, as they sit in the car) This is it, our leap into the unknown. Our work—our love—could change everything. MARIA reaches out, her hand finding ED’s in the darkness. A silent connection that speaks louder than words—a promise of unity against the shadows they’ve cast. MARIA (squeezing his hand, earnestly) No matter what tomorrow brings, we face it together, Ed. Side by side, come what may. The LIGHTS from their house beckon, a warm glow against the dark, an anchor amidst the storm of change they’ve set in motion. ED (grasping her hand, conviction in his voice) As long as we have each other, Maria. We can face eternity. Their doors open, and they step out into the night, hand in hand—their bond a beacon against the uncertainty that awaits. CUT TO BLACK. END OF PAGE 15 OF 150 PAGE 16 OF 150 (REDUCED): INT. ED AND MARIA’S HOME - LIVING ROOM - NIGHT The room, bathed in WARMTH and soft candlelight, whispers romance. ED and MARIA, entwined on their sofa, are islands amid a sea of chaos. Their conversation is intimate, a DEFIANT DECLARATION against the pressure of their own breakthroughs. MARIA (her eyes sparkling with love) To think… we’re on the precipice of ETERNITY. ED (matching her intensity) With you, every DISCOVERY, every risk is worth taking. Our PASSION fuels more than just science. Their glasses CLINK, an ACCORD struck not just to their UNION but to the paradigms they dare to shift. ED (with a rare earnestness) Against the vastness of TIME, it’s US, together. Locked in step against the depths of the UNKNOWN. Their embrace is the EPICENTER of the room. 
Any DISTRACTIONS, the awards, the accolades, recede into insignificance against their shared moment. MARIA (repoising with a soft fierceness) As long as we STAND together, no MYSTERY too vast, no challenge too daunting. Their kiss marks an EPOCH – an ERA of their own making. They stand, the architects of a future written on their terms. FADE OUT. END OF PAGE 16 OF 150 PAGE 17 OF 150 (REDUCED): INT. ED AND MARIA’S BEDROOM - NIGHT The STORM outside casts a sharp silhouette on the steamy WINDOWS. RAINDROPS patter rhythmically against the GLASS, complementing the PASSIONATE SYMPHONY from WITHIN. Ed and MARIA’S LOVE overflows; a TANGIBLE ENERGY undulates through the AIR as LAYERS OF CLOTHING fall away like petals. Now BARE to each other and the WORLD, they find TRUTH in the touch of SKIN on SKIN. They SINK into the CARNAL EMBRACE, BODIES ENTWINED in the timeless dance of DESIRE. MOONLIGHT streams through the WINDOW, casting ghostly RAYS over their MELDING FORMS. A profound INTIMACY fills the ROOM as a SERIES OF DELICATE MOANS, and WHISPERS CRESCENDO with the increasing FERVOR of the NIGHT. CUT TO: Adjacent Frame: GRAPHIC DETAILS OMITTED. Passion-filled SILHOUETTES imply their RISING DESIRE. CUT TO: The CAMERA pans to the SECURITY FOOTAGE on their LAPTOP, UNNOTICED in their RAPTURE. The RAT, lifeless in its CAGE, suddenly CONVULSES. Its DECEASED FORM twitches, resurrecting into a GHASTLY VERMIN gnawing at reality and reason. The RAT’S maligned RESURRECTION unfolds clinically, STARK against their NATURAL HUMANITY. It begins to BATTER against the confines of its GLASS prison with AWAKENED FEROCITY, signaling the start of a RELENTLESS PLAGUE. BACK TO ED AND MARIA: Their UNION, though BLURRED in the SHADOWS, remains the FOCAL POINT. But the STORM outside picks up its PACE, LIGHTNING ILLUMINATING the BACKDROP of their PASSION and the IMPENDING DREAD that brews UNSEEN. 
CUT TO: EXTERIOR SHOT: The RAT’S FIGURE, now SINISTER and FRENZIED, ESCAPES into the swirling MAELSTROM, finding its way to the HEART of an UNEXPECTING CITY. CUT TO: The BEDROOM builds to a PASSIONATE ZENITH. In the FLICKERING LIGHT, TWO SOULS ENTWINED seem untouched by the LURKING TERROR. FADE OUT. END OF PAGE 17 OF 150 PAGE 18 OF 150 (REDUCED): INT. ED AND MARIA’S BEDROOM - NIGHT The bliss of sleep is shattered by the relentless RINGING of a phone. The serene sanctuary where ED and MARIA rest, shattered by the intrusive sound. MARIA, groggy and disoriented, nudges ED, who mumbles a protest, desiring nothing more than the warmth of their sheets and the comfort of their embrace. MARIA (mumbling) Ed… the phone. ED, reluctance etched onto his face, finally gives in to the relentless tone. ED (sighs) All right… all right. Grasping the phone, he speaks, his voice heavy, laden with a night of passion and rest now interrupted. ED (into the phone, groggy) Hello? An URGENT message detonates through the SPEAKER. A COLLEAGUE’s voice rife with panic and fear frays the edges of their peace. COLLEAGUE (V.O.) (with desperation) Ed—The TEST SUBJECT! We’ve lost control—it’s all gone wrong! Blood drains from ED’s face as shockwaves ripple through his very being, his mind struggling to comprehend the GRAVITY of the situation. MARIA interprets ED’s shocked demeanor, alarm radiating like the growing storm outside. MARIA (with rising urgency) Ed, what is it? What’s happened? ED’s eyes widen with the knowledge of what his confirmation will unleash, the storm outside a mere echo of the CHAOS about to unfold. SMASH CUT TO BLACK. END OF PAGE 18 OF 150 PAGE 19 OF 150 (REDUCED): INT. ED’S LABORATORY - NIGHT The silence of the lab is shattered as a SECURITY ALARM blares. Emergency lights flicker on, casting an ominous glow across the chaos of the once pristine workspace. Papers are strewn across the floor, signs of a hasty evacuation. 
SECURITY MONITOR FOOTAGE An alarmingly still, white rat suddenly convulses to life. Its awakening is sudden, eyes glowing with a newly-found, unnatural energy. SECURITY GUARD (calling out into the phone, voice frantic) Code Red! The subject… it’s not dead, it’s— ED KAYNES (interrupting, his tone urgent) Activate the lockdown protocol, NOW! The guard nods, steeling himself for the task as chaos erupts outside the confines of the secure room. CUT TO: TIGHT ON MONITOR - RAT’S ENCLOSURE The creature, once a herald of scientific breakthrough, is now a harrowing testament to the disastrous turn their work has taken. It gnaws at the enclosure, a terrifying force unleashed. BACK TO ED ED stands in his remote viewing room, the light from the monitors painting his face with the intense realization of the NIGHTMARE their discovery has birthed. CUT TO: EXT. LABORATORY GROUNDS - CONTINUOUS The undead rat finds its way through a barely-ajar window, its escape silent yet foreboding—the first ripple of the impending pandemic. CUT BACK TO: INT. ED’S LABORATORY - NIGHT The security guard hits a RED BUTTON, engaging the containment protocol. He knows the THREAT that looms large—what began in their lab now spills into the world. The lab, once a beacon of hope, now ground zero for an apocalypse never foreseen. FADE OUT. END OF PAGE 19 OF 150 PAGE 20 OF 150 (REDUCED): INT. ED AND MARIA’S BEDROOM - LATE NIGHT The persistent RINGING of the phone shatters the silence. ED, drowsy and disoriented, reluctantly picks up. ED (groggily) Hello…? CUT TO: INT. GENEsis LABORATORY - SECURITY ROOM - NIGHT Amidst the cacophony of ALARMS, DR. LAURA STEVENS, a fellow scientist, delivers frantic news over the line. DR. STEVENS (voice frantic) Ed, it’s chaos here. The test… it’s turned into an outbreak! MARIA awakens, her eyes wide with fear. MARIA (to Ed) What is it, Ed? ED (sitting straight up, urgency in his voice) It’s gone wrong at the lab. We have to go. Now. 
Without hesitation, ED and MARIA spring into action, ready to confront the unfolding nightmare. CUT TO: EXT. SCIENCE CITY STREETS - DAWN Their car speeds through the abandoned streets as dawn breaks over a city teetering on the brink of disaster. Inside the car, ED and MARIA exchange a look of resolute determination. MARIA (steadfast) We will contain this. ED (with a grim nod) Together, always. As they race towards the GENEsis LABORATORY, the sunlight struggles against the growing shadow of catastrophe enveloping the city. FADE OUT. END OF PAGE 20 OF 150 PAGE 21 OF 150 (REDUCED): INT. GENEsis LABORATORY - SECURITY HUB - NIGHT Inside the stark SECURITY HUB, a flashing RED ALERT signals chaos. The NIGHT SHIFT GUARD scrambles, SLAMMING the alarm button with a sense of DREAD. CUT TO: INT. GENEsis LABORATORY - CONTAINMENT AREA - CONTINUOUS The RAT—subject of radical experimentation—now TWITCHES grotesquely to LIFE. The GLASS encasement SHATTERS as it breaks FREE, an OMINOUS symbol of the nightmare to come. SECURITY GUARD (alarmed, breathlessly) Containment breach! The rat… it’s… alive?! CUT TO: EXT. SCIENCE CITY - DESERTED ALLEY - CONTINUOUS Shadows gather and stretch as Infected FIGURES slink through the abandoned STREETS. A lone scream splits the SILENCE—a prelude to the impending HORROR. ED (V.O.) (somber, as he watches the chaos unfold on multiple monitors) The Pandora’s box we opened… can’t be closed. CUT TO: INT. ED AND MARIA’S BEDROOM - HEAVY NIGHT The virgin serenity is violated by the PERSISTENT phone ring. Ed and Maria stir, a dark realization setting in—something TERRIBLE is happening. MARIA (fear edging her voice) Ed, what’s going on? ED (his voice a mix of urgency and disbelief) We’ve got to stop this. Now. They rush from their BEDROOOM, HEADING toward the LABORATORY as the first light of DAWN reveals the city’s unfolding NIGHTMARE—a METROPOLIS on the brink of chaos. FADE OUT. END OF PAGE 21 OF 150 PAGE 22 OF 150 (REDUCED): EXT. 
SCIENCE CITY STREETS - PRE-DAWN The city rests under a VEIL of DARKNESS. STREETLIGHTS flicker as the first hints of morning struggle to penetrate a growing, unnatural FOG. A SENSE of DREAD permeates the desolate URBAN landscape. Suddenly, an eerie SCUFFLE breaks the stillness. On a deserted ALLEYWAY, a HOMELESS MAN huddles for warmth, unaware of the DANGER lurking. His EYES SNAP open, on EDGE, and FEAR takes hold as he spots several FIGURES moving with an unsettling, LURCHING gait towards him. The RAGGED SILHOUETTES grow clearer - ZOMBIES in various stages of DECAY. WASTING no time, the MAN scrambles to his feet, but his movements are SLUGGISH, still dulled by sleep. He turns to FLEE, but the UNDEAD descend upon him with RELENTLESS HUNGER. His PANICKED SCREAMS shatter the SILENCE of the NIGHT, a SYMPHONY of TERROR that marks the DAWN of a new HORROR. The TRESPASS of his agony paints a CHILLING PICTURE; the PREDATORS tearing into their prey without MERCY, a GRIM FORESHADOWING of the CITY’S looming FATE. CUT TO: INT. GENEsis LABORATORY - SECURITY ROOM - CONTINUOUS The NIGHT-SHIFT SECURITY GUARD, wide-eyed, stares at the GRUESOME SCENE through his bank of MONITORS. SHAKEN to his core, he reaches for the RADIO, his voice QUIVERING as he reports the INCIDENT. SECURITY GUARD (into radio, voice TREMBLING) We have an ATTACK in progress… Sector 5, west alleyway. Immediate RESPONSE needed… He realizes the HELPLESSNESS of his position, the FUTILITY of calling for AID already TOO LATE for the UNFORTUNATE VICTIM. His hand SHAKES as he switches off the MONITOR, the IMAGE of the UNDEAD’S assault BURNED into his memory. To his HORROR, he understands that this solitary INCIDENT is but the PRECURSOR to a CITY-WIDE nightmare. The ZOMBIES are free, and SCIENCE CITY is their HUNTING GROUND. CUT TO: EXT. GENEsis LABORATORY - DAYBREAK The lab stands as a BEACON of light as the SUN RISES. PANIC is PALPABLE, even from within the fortified WALLS. 
ED and MARIA wrap themselves in DETERMINATION.
ED (nodding, a mixture of determination and fear) It’s do or die. Let’s move. They head for the door, stepping from SAFETY to UNCERTAINTY, their fate entwined with the city’s hope for survival. CUT TO: EXT. CITY HOSPITAL - DAYBREAK The light of a new day does little to lift the gloom. AMBULANCES rush into the hospital’s bay. A REMINDER of impending horror. The camera zooms out, capturing the hospital — a BEACON in the storm, bracing for the full brunt of the outbreak. FADE OUT. END OF PAGE 23 OF 150 PAGE 24 OF 150 (REDUCED): EXT. SCIENCE CITY - PUBLIC SQUARE - MORNING In the early daylight, the usually serene PUBLIC SQUARE is awash with UNEASE as DISTANT SIRENS wail and NEWS REPORTERS deliver urgent messages. A podium stands erected for the HASTY broadcast of an EMERGENCY ANNOUNCEMENT. NEWS ANCHOR (ON TV) (gravely) Residents of Science City, heed this emergency broadcast. Stay indoors; the outbreak has breached containment. Panic spreads as PEOPLE scramble for shelter. Suddenly, ZOMBIES erupt amidst the crowd, their GROANS and staggering forms a VIOLENT intrusion upon the living. INT. GENESIS LABORATORY - LOBBY - CONTINUOUS Bright overhead lights illuminate SCIENTISTS destroying research to prevent further spread of the contagion. ED and MARIA, determination on their faces, move toward the heart of the laboratory—the only place where a cure might still be found. ED (under his breath) The cure… it’s there. We must get it. MARIA (with resolve) We will, Ed. No matter the cost. They know the weight upon them is immense, but they are resolved to FACE the NIGHTMARE they have helped unleash. EXT. CITY HOSPITAL - CONTINUOUS As the day breaks, the hospital becomes the last bastion against the invisible enemy, with MEDICS and SECURITY DOING THEIR BEST in their combat against the relentless tide of the afflicted. From high above, the city stretches out—a BATTLEFIELD between CIVILIZATION and CHAOS where every soul fights for SURVIVAL. 
END OF PAGE 24 OF 150 PAGE 25 OF 150 (REDUCED): EXT. CITY STREETS - DAYBREAK As dawn breaks, the CITY reels under the onslaught of the UNDEAD. NEWS ANCHORS urgently broadcast from make-shift studios, their voices a GRAVE WARNING amidst the chaos. INT. GENEsis LABORATORY - DAY Inside the lab’s fortified walls, SCIENTISTS scramble to DESTROY EVIDENCE and CONTAIN the spread. ED KAYNES, a shocked expression on his face, preps for a MISSION that could end the NIGHTMARE. MILITARY CHECKPOINT - CITY OUTSKIRTS - DAY Soldiers set up barricades, the area a sea of activity. A MOTHER and her CHILD, eyes wide with FEAR, race past, fleeing the now CURSED CITY. CITY HOSPITAL - EMERGENCY WARD - DAY Medical staff move in an ordered CHAOS, fighting against the VIRUS’ relentless spread. HELICOPTER BLADES beat overhead, a desperate bid to evacuate the CRITICAL. CITY HALL - MAYOR’S OFFICE - DAY MAYOR THOMAS STONE addresses the CITY, his voice a ROCK against the storm of fear, his face betraying the weight of command. MAYOR STONE (into microphone, authoritative) We stand together against this darkness. Safety is coming. Hold on, Science City. GENEsis LABORATORY - ROOFTOP - DAY A HELICOPTER descends, the landing pad alive with URGENCY. MARIA KAYNES, her face streaked with TEARS but her resolve UNBROKEN, clutches the vial of HOPE. MARIA KAYNES (to herself, resolute) This IS our hope… our LAST CHANCE. I won’t abandon it. She defiantly shakes off aid, her determination FIRM as the blades whirl around her. The FATE of the city resting on the contents of that fragile glass. CUT TO BLACK. END OF PAGE 25 OF 150 PAGE 26 OF 150 (REDUCED): INT. GENEsis LABORATORY - ED’S LAB - DAY A LAB in DISARRAY, MARIA KAYNES enters, the scene of chaos a STARK reminder of their NIGHTMARE. ED KAYNES stands by the BROKEN cage that once held their LIFE-ALTERING test subject, his hand trembling on the fractured lock. 
MARIA (voice carries the WEIGHT of their reality) Our worst fears… they’re our reality now… ED (facing the magnitude of their crisis) The subject, our test… it’s ESCAPED. Their eyes meet, a chilling recognition of the DISASTER they’re up against, RESPONSIBILITY binding them heavier than chains. MARIA (RESOLUTE, urging action) We find the antidote, Ed. We FIGHT this—for THE WORLD, for OURSELVES. ED (nods, gripping his resolve) We RISE to this. As they move with PURPOSE, the MORNING SUN breaking through the gloom hints at a world CHANGED forever. CUT TO: EXT. SCIENCE CITY - DAWN Amid a city’s SHATTERED SERENITY, they press on. DESPERATION etched on every face, HOPE holding on by a thread. Their footsteps ECHO down the hall, a call to action. ED (determination shaping his words) For the CURE… For SURVIVAL. A city holds its BREATH as the duo moves ever FORWARD, the PARADOX of their situation—a symbol of HUMANITY at the brink. FADE OUT. END OF PAGE 26 OF 150 PAGE 27 OF 150 (REDUCED): INT. GENEsis LABORATORY - SECURITY ROOM - DAY The room is cast in the unsettled light of MONITORS displaying haunting FOOTAGE. ED, MARIA, and colleague DR. STEVENS witness the unthinkable: the lifeless rat suddenly RESURRECTED, its movements grotesque in the sterile environment of science. DR. STEVENS (voice trembling with dread) It’s… alive. A horror we never imagined. ED (a cold realization) Our creation has turned monster… MARIA, her face etched with despair, clutches at a flickering hope. MARIA (eyes welling up) The children… our city… DR. STEVENS (gravely) Communications are down. We’re blind to who’s left. ED’s hand instinctively curls into a fist, the weight of RESPONSIBILITY crashing down. ED (determined, voice rising) We face this. Find the antidote. We END this plague… Together, they form an uncertain shield against the encroaching DARKNESS. CUT TO: EXT. 
CITY - WIDE SHOT - DAY The city, sunlit but devoid of life, lies silent but for the mournful echoes of a disaster beyond comprehension. CUT TO: INT. GENEsis LABORATORY - RESEARCH WING - CONTINUOUS With urgency, ED and MARIA navigate the debris-strewn corridors to the VAULT—a promise of a cure within. ED (panting with urgency) For our children… for survival, we must reach the cure. Their rushed steps RESOUND through the desolate lab—an anthem of defiance in the face of calamity. FADE OUT. END OF PAGE 27 OF 150 PAGE 28 OF 150 (REDUCED): INT. GENEsis LABORATORY - CORRIDOR - DAY The corridor is dimly LIT by intermittent red strobe LIGHTS. Silence hangs HEAVY as ED and MARIA navigate the abandoned hallway, their steps ECHOING against the walls. ED’s eyes SCAN ahead, alert and ready; his mind, a WHIRLWIND of fear and defiance. Suddenly, a GROWLING growl disturbs the STILLNESS, growing in VOLUME and FEROCITY. From the dark recesses, ZOMBIES SHAMBLE into view. Their grotesque figures are HUNGRY shadows, MOANING as they inch toward their prey. MARIA (voice barely a WHISPER) Ed…! ED steps in front of her, positioning himself as a BARRIER between MARIA and the HORDE. It’s SCIENCE versus SURVIVAL now. MARIA begins to back away, but the zombies are swift, INEXORABLY drawn to the living. ED and MARIA retreat, the tension PALPABLE. But a SINGLE zombie BREAKS from the group, LUNGING with OPEN jaws. ED (fighting back, his voice laced with ADRENALINE) Maria, move! MARIA rushes forward, pushing ED, but the ZOMBIE’s teeth CLAMP down on his arm—a PAIN that SPEAKS of an irreversible CHANGE. MARIA screams, her echoing CRY filled with PRIMAL fear, the realization of NIGHTMARES becoming REALITY. INT. GENEsis LABORATORY - SECURE ROOM - CONTINUOUS Gasping for AIR, they stagger into the room, barricading themselves against the onslaught. ED GASPS, PAIN written all over his face as he clutches the gory WOUND. 
MARIA, eyes WIDE with TERROR, instinctively reaches out, her touch both COMFORTING and FRAUGHT with desperation. MARIA (sobs choking her words) We can STOP the bleeding… There’s still TIME. ED’s breath comes in ragged HUFFS; his skin GROWS pale, the VENOM of his new reality spreading through his VEINS. ED (straining to SPEAK) Maria… you have to GO… Find the ANTIDOTE… MARIA collapses against him, her WORLD faltering. The PAIN of facing what comes next—a future FILLED with ANGST and relentless PURSUIT—crushes down upon her. FADE TO BLACK. END OF PAGE 28 OF 150 PAGE 29 OF 150 (REDUCED): INT. GENEsis LABORATORY - SECURE RESEARCH FACILITY - DAY Urgency fills the LAB as MARIA, relentless, RIFLES through vials. ED, his skin GRAYING, lies motionless on a steel table, the life within him ebbing. With hands trembling, MARIA grasps a SYRINGE, the label reads “EXPERIMENTAL ANTIDOTE.” In her eyes, a silent PRAYER to whatever gods may listen. MARIA (with conviction) Hold on, Ed. This could be our ONLY chance! She injects the substance into Ed’s veins. Their eyes meet, a dance of HOPE and FEAR. Suddenly, Ed’s body jerks—an alarming CONVULSION. Maria whispers ENCOURAGEMENTS, her voice a LIFELINE in the storm. ED (gritting his teeth, voice STRAINED but hopeful) It’s… working. Keep pressing, Maria. A moment of calm, then CHAOS erupts as the undead BREACH the door—GROANS and mangled cries FILL the room. MARIA, with newfound strength, WIELDS a fire extinguisher, a SHIELD against the advancing HORROR. They edge toward the door, SURVIVAL their ONLY thought. ED (screaming above the noise) Maria, LEAVE NOW! In a desperate act, MARIA pushes Ed forward, but as FATE would have it, the swarm ensnares him. With SHEER will, he throws her out the escape hatch, SEALING his fate within. MARIA (anguished as she pounds the door) ED, I can’t… I WON’T leave you! Her cries mix with the fading tumult inside. As the door LOCKS, the silence is DEAFENING. 
MARIA (tears streaming, a new resolve CRYSTALLIZING) I’ll END this, Ed. For US. For EVERYONE. The HELICOPTER awaits, its engine WHIRLING, a beacon amidst the despair. Maria’s courage is her COMPANION as the blades slice the air, carrying her toward her next battle. CUT TO BLACK. END OF PAGE 29 OF 150 PAGE 30 OF 150 (REDUCED): EXT. GENEsis LABORATORY - SECURE RESEARCH FACILITY - DAY The sterile corridor is now a CACOPHONY of TERROR. MARIA bears witness to ED’S transformation—skin DEATHLY PALE, veins BLACKENING. With the shadow of ZOMBIFICATION drawing near, he makes a drastic choice. ED (fierce, urging) Go, Maria… SAVE the world… our kids. With a mix of HORROR and fierce resolve, ED flings himself through the window, landing on the concrete below with an UNNATURAL FEROCITY only the infected possess. Maria watches as Ed, now imbued with the strength and RESILIENCE of the undead but mercifully retaining his mind, staggers to his feet and storms into the city. CUT TO: EXT. CITY STREETS - CONTINUOUS With the mind of a scientist but the body of a MONSTER, ED trudges through the chaotic streets, an INSATIABLE need to protect and a relentless drive to find their children FUELLING his every step. MARIA, atop the HELIPAD, is torn between following ED and escaping with the CURE. Tears stream down her face as she calls out to him, though he can no longer hear her. MARIA (crying out with pain and resolve) I love you… I’ll FIND them, Ed. I PROMISE! With the cries of her husband ECHOING in her ears, Maria vows to carry on their FIGHT and SAVE the city they LOVE. She watches Ed disappear into the horde of zombies, a LONE figure of tragedy amidst the DEVASTATION. The HELICOPTER, hovering anxiously, becomes her only refuge. She climbs aboard, clutching the potential CURE to her CHEST as they ascend into the MAELSTROM. CUT TO BLACK. INT. GENESIS LABORATORY - SECURE RESEARCH FACILITY - DAY Forcing herself to FOCUS, Maria enters the lab again. 
The screen FLICKERS to life, INTERFACING with her fierce intellect. She SYNTHESIZES, refines, and tests with the TENACITY of a woman reborn from the ashes of her old life. MARIA (whispering fiercely) For Ed, for our children… for EVERYONE. Her hands never waver, her eyes never STRAY. The readouts WINK back at her with PROMISING data, and the COMPOUND finally takes form—a vial shining with the HOPE of HUMANITY. MARIA (declaring against the sounds of CHAOS) This is the BEGINNING. Run Zombie, RUN! END OF PAGE 30 OF 150 FEEDBACK AND SUGGESTIONS FOR IMPROVEMENT: I am glad to see your engagement with the script and concern for the effectiveness of the dialogue and overall narrative. I will provide feedback based on the first page, as requested: Page 1: - The opening line spoken by Ed, “Science today gives shape to tomorrow’s reality. Let’s push boundaries,” sets an ambitious and pioneering tone for his character and the movie’s thematic focus on scientific advancement. It effectively introduces Ed as a visionary and key driving force behind the plot. The brevity and clarity of this dialogue also make it easily digestible for the audience. - The dialogue between Ed and Maria post-work illustrates their deep personal and professional relationship. Maria’s toast, “A toast to our love and the dawn of a new era in science,” while crisp and meaningful, could be diversified to avoid the potential cliché of ‘toasting’ scenes. Consider adding unique character traits or backstory elements to make the moment stand out. - Their kiss as “an ECHO in the void, a prelude to the TURMOIL ahead” effectively uses foreshadowing. However, the actual conversation before this moment doesn’t convey as much about their relationship as it could. Is there playful banter, supportive dialogue, or philosophical musings that can occur here to add depth to their connection? 
- Overall, the dialogue appears clear and advances the plot as intended for the first page, though it stays within traditional conventions. Considering the film’s genre and the challenge of establishing context quickly, this works adequately, but moments that reveal the characters’ unique voices or vulnerabilities might further engage the audience. Suggestions for improvement: - Infuse additional personal details into the dialogue to provide viewers with a more intimate understanding of the characters. - Avoid potentially overused tropes by enriching the interactions with subtle nuances, such as their shared history or individual quirks, which can add layers to the conversation. - Consider the pacing and flow of the dialogue, ensuring it feels natural and consistent with the characters’ established personalities. The introduction sets up the story effectively, but digging deeper into the characters’ personalities and dynamic early on can strengthen the viewers’ emotional investment in their journey. Certainly! Continuing with the feedback on Page 2 of your provided screenplay: Page 2: - The inner monologue from Ed, “The breakthrough we’re on… it’s unprecedented,” conveys his exhilaration about the scientific progress. It succinctly captures his dedication. However, this dialog can be less explicit and more showing. Consider integrating Ed’s enthusiasm into interactions with his team members or the results they’re discussing, making it feel like a natural part of the scene rather than direct exposition. - Maria’s words, “It’s remarkable, Ed. You’re on the cusp of changing everything,” introduces the scale of their scientific endeavor but may benefit from deeper emotional layers. To elevate the intimacy and the gravity of their work, you could include personal stakes or how this breakthrough might uniquely affect their lives. 
- The intimate exchange as Ed administers the injection to the rat (“Once we crack this code, immortality won’t be myth; it will be SCIENCE”) showcases the characters’ shared vision. Yet, this is a moment where personal and ethical implications could also come to the forefront, perhaps through a nuanced question or a contemplative remark from Maria, thus adding complexity to the scene. - The page ends with applause and a toast, “To science! To endless possibilities. To us!” The triumphant tone is appropriate given the context of the scene; however, the dialogue risks falling into a trope. Each character’s individual reaction to this event could ground the scene more in personal experience and provide further insight into their personalities and motivations. Suggestions for improvement: - Provide subtext in the dialogue through which characters reveal their personalities, concerns, or unspoken tensions underlying the celebratory moment. - Balance Ed’s professional tone with personal elements that hint at the weight of their discovery beyond the lab’s walls. - Rather than having characters reiterate the significance of their work outright, allow the environment (reaction of the team, the energy in the room, visual cues) to communicate the success and implications. Your Page 2 establishes the excitement around the scientific breakthrough effectively. Still, the addition of character-driven nuances and heightened stakes through dialogue would enrich the storytelling and enhance audience connection with Ed and Maria’s journey. Analyzing the dialogue and narrative style for Page 3 of your screenplay: Page 3: - The dialogue during the Genesis Conference Hall presentation effectively establishes Ed as a confident and visionary leader in his field. His proclamation, “Today, we stand at the cusp of a revolution—a breakthrough that could render aging obsolete,” carries the weight of potential societal transformation. 
While the dialogue shows his confidence, it may also benefit from highlighting potential conflicts or challenges they’ve overcome to reach this point, making the scene not just a moment of triumph but also one of reflection. - Maria’s exchange with her colleague (“This is more than an experiment—it’s a peek into a future graced with the gift of unending youth.”), while succinct and thematic, would have more impact if it also suggested an understanding of the broader implications of their research. It’s an opportunity to explore her personal take on what they are doing, which can give the audience a richer sense of her character. - An audience member’s interjection concerning ethics is a compelling element that injects a sense of reality and consequences into the celebration (“Dr. Kaynes, what ethical considerations have been addressed here?”). However, the response given by Ed might seem too concise for such a complex issue. Exploring this dialogue further could open the narrative up to deeper philosophical questions and tension. - The dialogue at the end of the page, where Ed and Maria share a silent acknowledgment, is a strong visual that consolidates their partnership. Yet, their public response to the ethical question can also provide a moment of contrast against their personal views. It can be an opportunity to introduce hints of doubt or concern that they might harbor privately. Suggestions for improvement: - Offer a glimpse into the ethical debates surrounding their work beyond the enthusiastic acceptance, providing a more nuanced view of the characters’ world and the potential consequences of their breakthrough. - Explore Maria’s perspective and her balance between sharing Ed’s vision and acknowledging the gravity of their discovery – perhaps with a more conflicted or contemplative line. 
- Allow the applause to be interrupted by a handful of dissenting or concerned voices, illustrating that not everyone is ready to accept their “revolution” without question – this can hint at conflict that will develop later in the script. Page 3 succeeds in painting a picture of professional acclaim but consider fleshing out the characters’ inner worlds and the societal ripple effect of their work to deepen the narrative and set up future conflicts. Continuing with the analysis precisely for Page 4 of your screenplay: Page 4: - The intimacy in the dialogues between Ed and Maria (“To us, beyond the science and the spotlight.”) in their private living room scene is effectively contrasted with the public atmosphere of the previous page. It’s reflective and personal, reinforcing their bond. However, you could add more texture to this conversation by weaving in subtle references to their shared past or details that allude to their individual apprehensions or hopes. - When Maria asks for a toast “beyond science and the spotlight,” it grounds their relationship in something deeper than their professional achievements. Consider deepening this by adding an aspect of their life which is distinct from, and possibly compromised by, their professional ambitions. - Ed’s reply, capturing the “miracle” of their love, is a touching sentiment but risks veering into cliché. You might consider how each character uniquely expresses affection based on their personalities to keep the dialogue fresh and poignant. - Their dance and the detailed description of the moment (“Laughter mingles with music—a rare cadence of careless joy”) provide a pleasant sensory experience for the audience. However, punctuating this scene with more subtext or internal conflict can amplify the significance of these carefree moments against their otherwise high-stakes life. 
Suggestions for improvement: - Expand the dialogue to include more distinctive phrases or references that are unique to Maria and Ed’s relationship, making their interaction stand out as genuinely their own. - Introduce a brief moment of foreshadowing or concern here — a gaze out the window, a lingering touch, a shadow passing by — that subtly disrupts their joy and hints at the turmoil ahead. - Have Ed or Maria articulate a personal dream or fear that has nothing to do with their work, giving more dimension to their characters and foreshadowing the potential costs of their scientific ambitions. On Page 4, you skillfully shift from a public to a private domain, emphasizing the multifaceted lives of your characters. While the tenderness between Ed and Maria is clear, deepening their dialogue to reveal more nuanced emotions and internal conflicts can add complexity, setting up for the challenges they will face later in the narrative. Analyzing specifically Page 5 of your screenplay: Page 5: - The page opens with a gripping alarm scene, introducing a sudden shift in tone from the previous intimate atmosphere to a sense of impending danger. Ed’s dialogue, “Alert the team… It’s the rat… It’s RESURRECTED,” is direct and clear, effectively ramping up tension. This abrupt shift is engaging but also risks feeling a bit jarring if not adequately foreshadowed in previous scenes. - The shock and horror are aptly conveyed through the dramatic turn of events and Ed’s reaction. However, the emotional impact on Ed could be explored further. This could be achieved by adding a beat where he processes what he’s seeing on the monitors before jumping into action, thus allowing the gravity of the situation to sink in for both Ed and the audience. 
- The use of technical language (“Alert the team… It’s the rat… It’s RESURRECTED.”) works to establish his scientific background, but it’s also a moment where you could introduce some more character-specific language or mannerisms that reflect how Ed uniquely handles crises. - The phrase “twitches, resurrecting into a GHASTLY VERMIN” packs a visceral punch and vividly portrays the threat. However, the physicality of the rat’s transformation might be expanded upon to heighten the horror, potentially through Ed’s description or reaction. Suggestions for improvement: - Use this moment to delve deeper into Ed’s psyche; brief fragments of internal monologue could capture his initial disbelief and slowly dawning realization of the consequences. - Given the shock value of the situation, consider showing a moment of vulnerability or hesitation in Ed’s actions — does he immediately spring into problem-solving mode, or is there a moment of paralysis as the horrific implications of the experiment gone wrong hit him? - To enhance the realism, the dialogue could reflect a more natural response to crisis, potentially with fragmented sentences or interruptions as Ed attempts to communicate under duress. This page serves as a critical turning point, transitioning from the introduction of the characters’ personal and professional lives to the unleashing of the central conflict. It’s effective at initiating the core plot’s momentum; by infusing additional character-specific reactions and providing a more nuanced depiction of Ed’s response, the gravity of the situation can be amplified, and the narrative tension can be further escalated. I appreciate your engagement with the script and your focus on improving dialogue quality. Here’s the feedback for Page 6 of your script: Page 6: - Ed’s motivating dialogue to his team, “Prepare the PROTOTYPE! We’re making history with every test!”, successfully conveys his leadership and the high stakes of their work. 
Nonetheless, this line could sound generic, and infusing it with more of Ed’s unique voice or established character traits could enhance authenticity. - The transition to the view of Science City through the montage is a solid narrative device that broadens the scope and showcases the city’s innovative spirit. Still, there might be an opportunity to juxtapose the energy of the city with a subtle indication of underlying tensions or foreboding elements that hint at the upcoming chaos. - The team’s response to Ed (“Prepare for revelation, everyone. Our vision, our reality!”) displays their dedication and enthusiasm but drops a chance to introduce individual characters’ perspectives or concerns within the lab, which could add depth and potential side stories. - The palpable anticipation leading up to the presentation (“Ladies and gentlemen, to the dawn of a new world unbowed by time!”) has cinematic grandeur but might benefit from a balanced, grounded lead-up that also showcases the pressure and doubts characters like Ed might feel. Suggestions for improvement: - Personalize Ed’s pep talk with his distinctive style or incorporate him responding to a specific question from his team that illustrates the nuances of the prototype they’re developing. - Integrate foreshadowing elements subtly within the city montage—a glimpse of an issue that could stem from or relate to their research, for instance. - Diversify the lab team’s dialogue to include varying perspectives or hints at their individual stakes in the project’s success, as this could enhance the tension and offer a richer character palette. - In the lead-up to Ed’s final speech, consider showing him taking a reflective, possibly anxious moment, adding to his character’s depth and setting the stage for the gravitas of the official presentation. On Page 6, the script maintains its forward momentum, positioning Ed as the driving force of their scientific enterprise. 
Deepening the dialogue to showcase a more complex team dynamic and infusing the buildup to the presentation with personal conflict can offer a more nuanced portrayal of the story’s central figures and magnify the impending sense of disruption. Providing feedback on Page 7 of your screenplay: Page 7: - The environment of the “GENEsis LAB - R&D DEPARTMENT,” as captured through Ed’s directive (“Today, we make leaps, not steps. Let’s ensure our NAMES go down in HISTORY.”), effectively sets a scene of scientific rigor and ambition. However, the dialogue risks sounding melodramatic. Consider adding a layer of specificity or a moment that characterizes the lab work in a more nuanced way to truly emphasize the historic importance of this day for Ed and his team. - Maria’s responsive remark regarding the “CELL’S REGENERATION” hints at the magnitude of their research and acts as a narrative exposition. To enhance the depth of her character, it might be useful to see her grapple with the immediate scientific excitement versus the long-term implications of their work. - Moving to the “GENEsis LABORATORY - HALLWAY” and the buildup to Ed’s speech, the dialogue ("Prepare for revelation, everyone. Our vision, our reality!) serves to underscore the importance of the presentation. Yet, the opportunity to provide more personal stakes or reflections by individuals in Ed’s team is again missed, which could add a layer of complexity and tension to the scene. - In the “PRESENTATION ROOM,” Ed’s dialogue sets up anticipation (“With every second we unveil MYSTERIES once bound to THE REALM OF GODS!”). This grand statement provides a sense of the transformative potential of their work, but Ed’s claim would benefit from a contrasting view within the room to highlight the diversity of reactions such a bold statement might elicit. 
Suggestions for improvement: - Avoid hyperbole in Ed’s directives, instead injecting his dialogue with specifics about the research at hand or acknowledging the combined efforts and unique talents of his team members. - Explore Maria’s intellectual and ethical curiosity further; as Ed’s partner, she could express both pride and caution, reflecting the duality of their groundbreaking work’s potential outcomes. - Introduce micro-tensions or subtle interpersonal dynamics within the team that signal varying levels of commitment or concern about the announcement, to add depth to their shared goal. - Infuse the buildup to Ed’s speech with a blend of anticipation and anxiety. Perhaps an assistant or a technical glitch momentarily interrupts him—a detail that reveals Ed’s ability to manage unpredictability and foreshadows future complications. Page 7 continues to sell the grandeur of Ed and Maria’s achievements and the beam of hope they represent for human advancement. By focusing more closely on individual character moments, the impending sense of consequence their work may bring could be accentuated, providing a richer backdrop for the story’s climax. Absolutely, here is the feedback for Page 8 of your screenplay: Page 8: - The description of the “GENEsis LABORATORY - LOBBY” in preparation for a significant event with Maria greeting guests provides a setup that illustrates the LAB employing a public-facing role. Her dialogue, “Welcome all! A remarkable day awaits us,” sets an inviting, yet formal tone. This could be enhanced by adding a personal touch or nuance, reflecting Maria’s thoughts on the event’s potential repercussions or her anticipation for the day. - Dr. Kim’s compliment to Maria, “Maria, your work is awe-inspiring!” while positive, feels predictable. Developing their interaction with more unique dialogue or touching on their shared history or scientific opinions can add layers to both characters. 
- The alcove conversation (“This could change everything, for better or worse!”) is an excellent technique to introduce secondary characters’ perspectives, concerns, and the potential ethical dilemmas of the LAB’s work. The dialogue achieves this but may benefit from showing specifics that the featured figures are worried about, making their concerns more tangible and impactful to the story. - Maria’s subsequent dialogue with Dr. Kim feels a bit on-the-nose. The statement, “Indeed, a door to the future is opening,” sounds too abstract. It may be more effective to show them discussing the practical applications of their work or the societal changes they expect, grounding their conversation in concrete terms. Suggestions for improvement: - To increase the richness of dialogue, consider adding subtext that hints at the characters’ complex emotions or reservations regarding the momentous day. This would both deepen the characters and foreshadow the potential consequences of their work. - Explore more personalized greetings from Maria or comments from guests that indicate the LAB’s impact on different societal sectors, perhaps including subtle hints of dissent or unease among excitement. - Use the private alcove debate to delve into more nuanced discussions about the implications of the lab’s work, offering viewers insight into varied societal viewpoints and increasing tension. - Develop Maria’s dialogue to reflect her layered relationship with Dr. Kim and her own internal conflict about their work ahead of the day’s big reveal. It could cement Maria’s character as one with depth and personal stakes in the work beyond scientific achievement. Page 8 maintains the momentum of the narrative as it leads up to the pivotal reveal. The dialogue and interactions are functional but lack the complexity that could come from showcasing the characters’ more profound inner lives or the gravity of the day’s events. 
Heightening the emotional subtext and including more varied reactions would enhance the dialogue’s impact and create a more textured buildup to the subsequent dramatic events. I am ready to provide feedback on Page 9 of your screenplay. Here’s the analysis: Page 9: - The dramatic unveiling of the project, as detailed in Ed’s dialogue (“Ladies and gentlemen, today we stand on the brink of a new frontier in science.”), succeeds in giving off a sense of historic importance. However, his speech may benefit from the inclusion of a personal anecdote or a reference to the journey they’ve had, offering a glimpse into his own vulnerabilities or sacrifices made for the sake of their research. - The audience’s reaction, filled with applause and cheers, is an understandable response, yet it might be interesting to vary these reactions to reflect a range of emotions and to foreshadow any potential conflicts or oppositions to their grandiose claims. - Maria’s pride in the crowd and her non-verbal communication with Ed adds a neat, cinematic layer to the scene, seamlessly aligning the personal with the professional. It could be enhanced by deepening the understanding of Maria as an individual; perhaps she has mixed feelings or specific reasons for why their success means so much to her personally. - Ed’s bold claim about “rewriting the narrative of human life” is powerful and sets up the scope for what’s to come. Nevertheless, it could be juxtaposed with a brief, subtle counterpoint in the scene — maybe a character in the audience showing skepticism or concern — to introduce an undercurrent of opposition or ethical doubt in the narrative. Suggestions for improvement: - Provide Ed with a personal story or reflection that he can share during his presentation to make the science more relatable and palpable for the audience. This could humanize the implications of their work and ground his character. 
- Integrate a wider array of reactions among the audience to highlight the diverse ethical and social reactions their announcement could provoke, thereby creating a richer narrative texture and priming viewers for conflict. - Offer more insight into Maria’s internal world during the presentation. Perhaps she can have an aside or internal monologue that divulges her true feelings about their discovery and its wider societal impact. - Consider introducing a credible voice of dissent or skepticism among the supporters to add narrative tension and create a more realistic, multi-faceted response to the prospect of rewriting human life’s narrative. Page 9 of the screenplay effectively communicates the weight of Ed and Maria’s breakthrough and conveys their triumphant moment. To provide more depth and set the stage for the conflicts that will arise from their discovery, consider dialing up the personal stakes and introducing hints of the challenges that lie ahead. Of course, here’s the feedback focusing on Page 10 of your screenplay: Page 10: - Ed’s triumphant declaration about the rat named PHOENIX and the “Elixir of Life” efficiently captures the cinematic reveal of a central plot device. The boldness of the statement fits the moment, but taking the time to show a closer view of PHOENIX reacting to the elixir, with observers’ reactions interspersed, would bolster the scene’s dramatic impact. - The scenes with crowds responding en masse can detract from personal emotional engagement. While cheers and applause may be a realistic response, they limit the depth of individual character reactions. Introducing cutaways to key characters in the audience who react differently — whether with skepticism, concern, or personal interest — would enhance engagement and tease future plot complications. 
- Ed’s bold claim, “Today, we turn the page, facing tomorrow’s SUNRISE—timeless and untamed!” is suitably grand; however, without grounding it with personal stakes or presenting a visible reaction from the team, it runs the risk of feeling detached. A contrasting perspective from either Maria or another character could introduce a valuable narrative layer, suggesting the dawn of this new world is not without potential darkness. - The page culminates in their collective triumph, yet the dialogue risks falling into a triumphant cliché. It might benefit from the introduction of a narrative element that subtly undercuts the milestone, such as a behind-the-scenes problem, to seed conflict and foreshadow future challenges. Suggestions for improvement: - Break the monolithic crowd response into individual threads — key figures can offer whispered comments, raised eyebrows, or looks of consternation that speak volumes without uttering a word, thus diversifying the reception. - Incorporate more nuanced reactions or dialogues from Maria and the team members during the presentation to showcase a spectrum of perspectives and add depth to the shared experience. - Consider Ed’s own reflections on the presentation’s implications and the effect their breakthrough will have on his personal life. A moment of contemplation or doubt could provide an additional dimension to his character. - Offer a glimpse into potential ethical or logistical concerns that the “Elixir of Life” might raise. This might include a brief side conversation between characters or a visual cue that not everything is as perfect as it seems. Page 10 continues to build upon the central narrative event, solidifying Ed and Maria’s scientific triumph. 
By interspersing individual reactions and underlying doubts within the public spectacle, you can weave a richer tapestry that not only showcases the wonder of their achievement but also foreshadows the complex ramifications of their “Elixir of Life.” Analyzing Page 11 of your screenplay: Page 11: - Ed’s engagement with the audience regarding ethical considerations, “ETHICS in science is our compass—steering the ship of discovery in RESPONSIBLE waters,” is pivotal for establishing the moral compass of the characters. However, the dialogue may benefit from showcasing a more nuanced exploration of the concerns being raised, perhaps by allowing Ed to cite a specific example that reflects his and the lab’s commitment to ethical considerations. - The Reporter’s inquiry about accessibility (“What of inequality? How will the ELIXIR be ACCESSIBLE?”) is an excellent moment, prompting a discussion that delves into the societal implications of their discovery. While Maria’s response is articulate, it might be further enriched by showing a glimpse of the dynamics between the researchers and policymakers. This would highlight the complexity of applying such a profound scientific breakthrough to society. - A BIOETHICIST’s question about the potential cost to society introduces a critical voice that enriches the dialogue by painting a picture of the debate surrounding their work. Ed’s reassurance of careful advancement might carry more weight if paired with a previous instance illustrating their commitment to caution. - The applause from the crowd, signaling solidarity and support, could perhaps be somewhat subdued or mixed with whispers of dissent to continue layering the public’s range of reactions to the breakthrough. Suggestions for improvement: - Present a moment where Ed or Maria references a particular protocol or past decision that exemplifies their dedication to ethics, grounding their lofty dialogue in concrete examples. 
- Include not just assurances from Maria but also perhaps her acknowledgement of the difficult road ahead in ensuring fair access, reflecting her awareness of the challenging reality. - Allow for some audience members to exhibit visible reactions to the bioethicist’s concern—some nodding in agreement, others looking concerned or perhaps unconvinced—thus highlighting the variety of public opinions. - Ed’s confident proclamation of their approach to risk could tie back to personal experiences or previous setbacks, which might offer the audience a more intimate understanding of how both he and Maria view their responsibility to society. Page 11 does well in portraying Ed and Maria as mindful of the broader implications of their scientific work. However, deepening the dialogue to include specific references to past actions and showing the array of public responses can provide a more robust, multi-faceted narrative that underscores the complexities entwined with groundbreaking scientific discovery. Providing feedback tailored to Page 12 of your screenplay: Page 12: - The elegant reception juxtaposed with sophisticated conversations serves as an excellent backdrop for the unfolding drama. The interactions, particularly that of Chancellor West (“Remarkable, Ed! The city’s abuzz with excitement.”), convey the public support and recognition. However, the dialogue might seem formal and lacking in individual distinctiveness. A more personal exchange would add depth. Perhaps Chancellor West could share a brief, personal sentiment or a piece of wisdom that hints at the weight of their achievements. - The debate between Dr. Rosenberg and Ms. Sharpe in the private alcove introduces a thematic counterpoint to the public celebration. Their concerns (“The box is opened… can they now control what spills forth?”) are essential for sewing seeds of doubt. Building on this dynamic by incorporating a balanced dialogue that reflects specific fears could enhance the gravity of their words. 
- Maria and Ed’s interaction with the young investors has the right mix of excitement and groundedness. While celebratory, there is an opportunity here to capture a more complex texture by subtly portraying Ed and Maria’s apprehension, like a visual cue that implies their internal worries despite the veneer of confidence. - The scene captures the intellectual solidarity behind the couple with lines like “To a NEW AGE! An era defined by your vision!” While the dialogue successfully conveys the investors’ enthusiasm, it might benefit from displaying more nuanced investor interests or concerns, as this would provide a realistic portrayal of their response to such a game-changing innovation. Suggestions for improvement: - Include specific, character-driven responses from the Chancellor that offer a perspective on the implications of the breakthrough from the city’s leadership point of view. - Expand on the private conversation between Dr. Rosenberg and Ms. Sharpe. They could discuss actual incidents or data that ground their ethical and control-related concerns, rather than speaking in hypotheticals. - When investors toast to “a new age,” showing Ed or Maria having an aside conversation or an internal reflection might unveil their own concerns about what ‘the new age’ really entails for humanity. - Perhaps frame the triumphant moment with a subtle, dissonant undercurrent—something as simple as a spilled drink or a flickering light that doesn’t demand much attention but visually echoes the notion of imperfection and foreshadows future discord. On Page 12, you’ve depicted a multi-layered scene that ranges from public admiration to intellectual contemplation. While the script continues to set a celebratory tone for Ed and Maria’s breakthrough, injecting moments of private reflection or conflict would enrich the scene’s emotional complexity. 
By providing more personalized and conflicting viewpoints, the celebrations can transition from a moment of unequivocal joy to a more thought-provoking scenario, leading to a more intriguing plot development as the story progresses. Here is the feedback specific to Page 13 of your screenplay: Page 13: - The scene in Maria’s office (“Today was extraordinary, Ed. But I feel unease…”) offers a moment for introspection and foreshadowing. Maria’s admission shows her sense of foreboding and adds emotional depth. Yet, the dialogue could dive deeper, perhaps conveying specific fears or doubts about their work’s ethical ramifications, to provide a fuller picture of her internal conflict. - Ed’s reassuring embrace and response (“Today, we stepped into the unknown, trusting in our work…”) acknowledge the risk inherent in their scientific endeavors. However, expanding on Ed’s emotional response could reinforce his character development, showing a side of Ed that grapples with the consequences rather than merely offsetting Maria’s concerns with optimism. - The visual of the couple looking out upon the city skyline is powerful. To enhance this, consider adding a dialogue or action that creates a personal connection to the city — perhaps it is the place they met, or they muse about how the city has changed. This personal attachment could serve to underscore the stakes of their actions for them as individuals, not just as scientists. - The final dialogue of Ed (“The future rests in our vigilance…”) solidifies their joint commitment to their work’s potential risks. While the message is clear, Ed’s dialogue might be further developed to express acknowledgment of the many uncontrollable variables that come with playing god — a daunting reality that would naturally weigh on their consciences. 
Suggestions for improvement: - Give more depth to Maria’s reflection by including an anecdote or memory that grounds her unease in personal experience, connecting her emotions to concrete events or possibilities. - Instead of Ed immediately providing comfort, allow him a moment to process Maria’s concerns, reflecting his own struggle with the dual nature of their promise and threat to humanity. - As they look out over the city, have them recall a more innocent time or make a parallel between their growth and the city’s evolution, which can draw a poignant contrast with the threat looming over it. - When Ed speaks of the future, incorporating a dialogue that acknowledges the fine line they’re treading and the precarious balance of their ambitions against the risks involved can add nuance to his outlook. The scene on Page 13 does well to contrast the public adulation with private vulnerability, offering a glimpse into Maria’s trepidations and Ed’s unwavering support. By expanding on their internal dialogues and emotions, revealing their shared history, and enhancing their connection to the larger narrative, the script can build greater empathy for Maria and Ed and set the groundwork for the ethical and emotional trials that lie ahead. Certainly! Here is the tailored feedback for Page 14 of your screenplay: Page 14: - The transitional moment to Ed and Maria’s home after their achievement (“The decisions we’ve made… they’ll define us, Ed.”) is emotionally insightful, providing a thematic bridge between their public success and private concerns. Yet, the dialogue as structured leans towards the expositional. It may benefit from more subtlety and show the nuances of their apprehension through actions — perhaps finger-tracing a photo from their past or a hesitant pause before turning off the last light in the lab. 
- The reference to the cosmos and their dance beneath the stars (“Together, we are an UNBREAKABLE force against the tempest’s FURY.”) captures their romantic unity well, but adding a layer that reflects on the contrast between their control over nature’s laws and their submission to the laws of the cosmos could add a poetic depth to the moment. - The language and descriptions are vivid (“…as they stand beneath the cosmos. The FLICKER of candles is a testament to their enduring bond.”); however, the script could provide a closer look into their personal dynamics. For instance, displaying a moment of gentle humor or shared secret that only they understand could further personalize their relationship. - The scene sets an intimate tone but also provides an opportunity to plant seeds of further conflict (“This tranquility… May it shield us from the GALE that awaits.”). Drawing a subtle connection to their inner fears about the upcoming challenges — maybe a troubling call they postpone answering — would serve to subtly ramp up the tension for the audience. Suggestions for improvement: - Introduce body language or actions that betray Maria’s or Ed’s inner turmoil without explicit dialogue. Perhaps they hold hands but with a grip that reveals underlying tension. - Consider giving Ed a line that inadvertently reveals his awareness that they cannot control everything — mirroring the unpredictable nature of the universe around them. - Utilize their serene environment for juxtaposition — a silent moment where both characters’ gazes might reveal concern or doubt, indicating that not all is as serene as it seems. - As they bask in the safety of the night away from the public eye, incorporate subtext into their dialogue suggesting that their outward confidence masks an understanding that their successes come with potential consequences they have yet to face. 
Page 14 emphasizes the loving bond of the two characters, focusing on their momentary peace amidst the implied brewing storm. By introducing subtle signs of conceivable foreboding and deepening the dialogue with intimate expressions unique to their relationship, the scene could amplify the emotional intensity and groundwork for the future developments woven throughout your narrative. Here’s the analysis tailored for Page 15 of your screenplay: Page 15: - The stillness of the car scene contrasted with the “silent murmur of the city” subtly sets up a reflective space for Ed and Maria to converse, isolated from the external world. Their banter, hinted at in lines like “Just another day at the office for the Kaynes duo,” can enhance the depth of their relationship by balancing professional achievements with their personal rapport. However, expanding this into an exchange that hints at the emotional cost of their work could foster even greater intimacy in their dynamic. - The tension in their shared promise (“No matter what tomorrow brings, we face it together, Ed. Side by side, come what may.”) effectively communicates the solidity of their relationship while foreshadowing the challenges ahead. Yet, this moment could be improved by juxtaposing the comfort of their embrace against a backdrop of subtle environmental unease, perhaps the flickering of a distant light or the rustling of wind — elements that insinuate disturbances in their safe haven. - Ed’s conviction (“As long as we have each other, Maria. We can face eternity.”) reaffirms their mutual dependency and strength, which is a recurring theme. To reinforce this message without redundancy, consider introducing a vulnerability in Ed — an unsteady breath or a lingering look at Maria that suggests even he is unnerved by the journey ahead, despite his confident words. - The dialogue hints at a broader emotional spectrum but remains within expected parameters of romantic reassurance. 
To add complexity, integrate indications of personal longing, unfulfilled dreams, or remnants of fear that tug at the fabric of their shared professional front. Suggestions for improvement: - Add layers to their in-car dialogue with subtextual cues that allude to personal sacrifices made or anxiety about the impact of their work on societal norms and personal lives. - Utilizing the car as a metaphorical vessel moving through the night, convey that while they’re together on this journey, outside forces — like the ones they have unleashed — are beyond anyone’s control. - Ed’s line about facing eternity together could link back to a previous instance of shared joy, a scientific dilemma, or a personal trial that they overcame, grounding their bond in shared experiences rather than just forward-looking pronouncements. - Introduce a subtle sense of foreboding into the scene — a sudden silence in their laughter, a fleeting sense of discomfort, or an unexpected reminder of the chaos they’ve left behind at the lab — to contrast with the notion of their safety and control. Page 15 captures a raw, emotional moment for Ed and Maria that reflects the steadfastness of their bond amid looming challenges. Deepening their exchange with personal revelations and incorporating understated signs of apprehension can enrich the narrative, providing a contrast between their profound connection and the unpredictability of the future they’re facing. Continuing with the focused feedback for Page 16 of your screenplay: Page 16: - Ed and Maria’s evening at home is depicted as a peaceful respite from their daylight achievements. The toast, “To the miracle of you, the REAL discovery of my life,” exemplifies Ed’s deep affection for Maria. However, this line could verge into familiar territory and might be made more effective with a unique twist that expresses Ed’s feelings in a way that’s characteristic of the personal language they share as a couple. 
- The description of their dance and intimate interaction is touching, yet without additional context, it risks echoing similar romantic scenes. Introducing a specific memory they share or an element that connects back to their scientific or moral journey would add originality and depth, anchoring their personal exchange in the unique fabric of their lives. - Maria’s reply, “As long as we STAND together, no MYSTERY too vast, no challenge too daunting,” affirms their partnership’s strength. To enhance its narrative power, consider adding a personal anecdote or introspective comment that acknowledges the paradoxes they’ve faced — underscoring her optimism with realistic underpinnings. - The scene’s close focuses on the ephemeral nature of their contentment. Reiterating their triumph (“They stand, the architects of a future written on their terms.”) does well to remind us of the larger narrative at play. Still, the inclusion of an interrupting element — such as an urgent phone call they choose to ignore — could subtextually question the stability and control they believe they possess. Suggestions for improvement: - Refine Ed’s expression of sentiment by tying it to a specific event or shared interest that accentuates the uniqueness of their relationship and personal journey. - During their revelry, leverage their environment or a nostalgic object within the scene to evoke a fond yet poignant memory, deepening the emotional resonance of their celebration. - Have Maria’s dialogue reflect a juxtaposition of her external confidence with an internal admission of the daunting nature of their achievements and the shadows these cast upon their future. - Introduce a subtle instance of discord during their tender moment that hints at the impending turbulence — a reminder that although they may be architects of the future, they are not fully in control of the foundation upon which they’re building. 
Page 16 effectively portrays a private moment of companionship and underscored triumph between Ed and Maria, serving as a poignant counterpoint to their public personas. By adding personalized elements and a hint of imminent conflict to their dialogue, the screenplay can create a more nuanced portrayal of their relationship and the consequences of their groundbreaking work. Certainly, here’s the analysis for Page 17 of your screenplay: Page 17: - The juxtaposition of the stormy exterior with the passionate encounter between Ed and Maria adds a dramatic and sensual layer to the story. The restoration of intimacy (“their LOVE overflows; a TANGIBLE ENERGY undulates through the AIR”) works well, but consider further enriching this scene by reflecting on the dichotomy between their control in professional life and surrender to nature’s unpredictability. - The depiction of their passionate display, while framed tastefully, could benefit from focusing on the emotional stakes of their relationship, as much as their physical connection (“MOONLIGHT streams through the WINDOW, casting ghostly RAYS…”). Perhaps interlacing their dialogues or thoughts about fears and aspirations amidst their intimacy could elevate the scene’s emotional quotient. - Their lovemaking represents a moment of unchecked human desire amidst their rigid scientific pursuits. To ensure this scene doesn’t feel cliché or out of place within the larger narrative, ground it with callbacks to their shared history or even worries about the future they are actively shaping. - The narrative twist — the rat stirring and its ominous resurrection as Ed and Maria are preoccupied — is compelling. Nonetheless, it would resonate more if the preceding passion was mirrored later with regret or a sense of omission due to their preoccupation, highlighting the theme of consequence. 
Suggestions for improvement: - Interweave thoughts or dialogue that reveal their underlying concerns during their lovemaking, constructing a contrast between their current bliss and the complexities of their reality. - As the story unfolds from passion to the rat’s sinister resurrection, parallel the ‘resurrection’ motif with dialogue or a symbolic element in the bedroom. Perhaps they knock over a picture or spill something unknowingly — a metaphor for unforeseen outcomes of their own making. - Include subtle nuances where Ed and Maria express unspoken desires or concerns for humanity amidst their love-making, to hint at the personal impacts of their ambitions. - After the passionate zenith, offer a moment where Ed and Maria, sated, fall into a reflective silence, pondering the fragile balance between their personal affections and the impending chaos that their work may unleash upon the world. In Page 17, you effectively contrast the tempestuous exterior with the storm raging within, expressing both Ed and Maria’s deep bond and the unforeseen forces they’ve inadvertently set into motion. Introducing forethought and personal reflection into their most vulnerable moments could serve to remind the audience of the broader narrative, further weaving the threads of their intimacy with the foreboding reality of their situation. Analyzing Page 18 of your screenplay with the same detailed approach: Page 18: - The sharp interruption of sleep by the phone call is a classic narrative device to signal an imminent crisis. Ed’s reluctant answer (“All right… all right.”) followed by an urgent message received is realistic and gripping. To intensify the moment, perhaps include a brief interaction where Maria senses something amiss before Ed confirms her fears, thereby enhancing the tension. - Maria’s immediate worry, followed by Ed’s assertive response to rush to the lab (“It’s gone wrong at the lab. We have to go. 
Now.”), illustrates their close partnership and shared responsibilities. Nonetheless, this could be further developed with a glimpse of their individual reactions to the situation. This would emphasize the gravity of the news without explicitly stating it and add emotional layers to the characters’ reactions. - The choppy transition from bed to lab (“blood drains from ED’s face as shockwaves ripple through his very being”) is evocative but leaves space for more nuanced storytelling. A visceral body reaction from Ed would simultaneously show his shock and allow the audience to better envision the stakes of what’s transpiring. - The scene ends with a SMASH CUT TO BLACK, which is a powerful directorial choice highlighting the abruptness of the situation. Consider splicing in a visual cue that underscores the breakneck pace of the unfolding emergency — a knocked over glass of water, a photograph knocked askew in their haste to leave — offering a visual metaphor for their disrupted lives. Suggestions for improvement: - Create a link between Ed’s grogginess and a sudden jerk into action to highlight his swift transition from personal to professional urgency, perhaps through Maria’s expression of concern or Ed’s abrupt movement. - Build Maria’s initial panic with prompts that reflect their shared consciousness of what might go wrong, even before Ed speaks. This could be done through non-verbal cues showing her hurry to dress or pack essentials for a potential emergency situation. - Dive deeper into Ed’s physical reactions to the news with descriptive language that portrays not just shock, but also the dawning realization and the weight of their actions that is crashing upon him. - Add a visual cue of disarray in their home, symbolizing their world turning chaotic — a reflection of how the external chaos caused by their scientific breakthrough has started to penetrate their personal sanctuary. 
Page 18 is pivotal as it forces Ed and Maria to confront the consequences of their work head-on. By focusing on the characters’ individual responses to the crisis and including extra visual details, you have the potential to strengthen viewers’ emotional connection to your protagonists. This will set the stage for the characters to reckon with the reality of their choices and amplify the sense of urgency that propels them into the action that follows. Continuing with the targeted feedback for Page 19 of your screenplay: Page 19: - The scene in Ed’s laboratory is effectively chaotic, with the blaring security alarm presenting a clear sign of crisis escalation. However, the scene could benefit from more detailed reactions or a quick back-and-forth between Ed and the security guard, which would not only heighten urgency but also ground the action in reality. - Dr. Laura Stevens’ dialogue over the phone (“Ed, it’s chaos here. The test… it’s turned into an outbreak!”) efficiently conveys the severity of the outbreak. It might be enriched by adding personal concern for her colleague, which would underscore the relationship between characters and provide context for the panic. - The portrayal of the rat as a horrific symbol is a strong narrative choice. To further amplify the horror, consider including a description of the immediate impact of the rat’s behavior on other lab elements or personnel reactions as they discover the animal’s transformation, adding to the sense of dread. - The transition to Ed in the remote viewing room (“ED stands in his remote viewing room…”) is stark, which effectively places him in the position of an observer of his own nightmarish creation. To make this moment more powerful, perhaps have Ed reflect on the earlier cues of danger he overlooked, thereby connecting his character arc more directly to the unfolding mayhem. 
Suggestions for improvement: - Intensify the immediacy of the outbreak through descriptive language that not only details the actions being taken but also hints at Ed and the security team’s inner turmoil as they react. - Deepen the security guard’s sense of panic with layered dialogue that reveals his personal stakes in the safety of the lab — maybe he’s concerned for a colleague or a particular section of the lab. - As the rat’s resurrection unfolds, consider contrasting its grotesque reanimation with flashbacks or rapid thoughts of Ed’s previous observations, amplifying the regret and fear that it’s now unstoppable. - Integrate physical responses from Ed such as gripping the phone tighter, narrow misses as he hurries to control the situation, or a personal mantra to keep focused amidst chaos — humanizing him further and adding to the intensity of the crisis. Page 19 builds upon the tension and horror elements, escalating the stakes with the depiction of a critical and dangerous turning point. Enhancing character reactions and dialogue while balancing action and emotional resonance can convey the magnitude of the crisis, compelling the audience to feel the weight of what the characters are experiencing. Adding these personal touches and intensifying the urgency through Ed’s responses would enhance the emotional gravity of the situation and set the stage for the challenges ahead. Proceeding with the tailored feedback for Page 20 of your screenplay: Page 20: - The sequence of events from the characters’ hurried drive through the city at dawn to their arrival at the GENEsis LABORATORY develops a building sense of urgency and foreshadows the pivotal actions to follow. Ed and Maria’s determination is clear, but this could be heightened by small moments of doubt or fear that creep into their conversation or body language as they approach the lab — a tightening grip on the steering wheel, a quick exchange of worried glances. 
- The in-car dialogue as they prepare for what’s ahead (“We will contain this”) could use an extra beat to show both Ed’s and Maria’s character development. Instead of a straightforward affirmation, explore the possibility of them using their personal leitmotifs or shared experiences to reinforce their resolve, which can add emotional resonance to the scene. - The description of their race against time to the LABORATORY is visually engaging; however, adding environmental reactions to their urgency — like startled birds taking flight or a streetlight flickering out as they pass — could symbolically enhance the gravity of the situation. - The final line of the page (“The sunlight struggles against the growing shadow of catastrophe enveloping the city”) is effectively metaphorical, aligning with the broader thematic tension. Fleshing out this metaphor, maybe by highlighting signs of normal life abruptly thwarted or by capturing the last remnants of calm before the full scale of the disaster is revealed, can underscore the impending calamity. Suggestions for improvement: - Introduce more personal, introspective dialogue during their car journey, allowing Ed and Maria to briefly reflect on the human cost of their choices, juxtaposing their professional resolve against their private apprehensions. - During the car ride, provide a moment of silence — a sudden pause in their conversation signifying a mutual realization of the enormity of the crisis — to let the weight of the situation settle on both them and the audience. - Expand the depiction of dawn as they drive, perhaps by drawing a parallel between the encroaching daylight and the growing darkness of the outbreak, deepening the irony of the setting. - Just before they step out at the LABORATORY, let there be a moment where Ed or Maria hesitates — a subtle nod to the fear of what they might find inside that stands in stark contrast to their outward bravery. 
Page 20 propels the narrative forward by transitioning Ed and Maria from the safety of their domestic sphere to the epicenter of the unfolding crisis. By incorporating more detailed environmental cues and personal emotional conflicts, the screenplay can provide a stronger sense of urgency as the characters move closer to the harsh realities awaiting them, intensifying the anticipation for what’s to come. Here is the focused feedback for Page 21 of your screenplay: Page 21: - The description of the GENESIS LABORATORY’s security situation (the security guard engaging the lockdown protocol) successfully communicates the immediacy of the threat, providing a solid image of the lab’s response to the crisis. To elevate the stakes even further, consider enhancing the guard’s dialogue with expressions of personal concern or including auditory cues, like the distant sound of chaos beyond the room. - DR. STEVENS’s line, “Communications are down. We’re blind to who’s left,” succinctly captures the tension of the moment and the heightened stakes for the characters. This dialogue could benefit from a follow-up expressing the implications of this breakdown, such as limiting their ability to warn others or hindering coordination with external emergency services. - The SHADOWS slinking through the streets marking the spread of the infection is a strong image. Here you could add more sensory details, such as a chilling breeze that Ed and Maria sense as they leave the car, which parallels the creeping fear and the invasive threat they face. - Ed’s solemn voice-over (“The Pandora’s box we opened…”) provides an insightful reflection on the situation. Yet, it could be paired effectively with a visual representation of the proverbial ‘Pandora’s box’ in the lab — a left-behind piece of equipment or an abandoned project — to visually underline the themes of unleashed catastrophe and lost control. 
- The transition from the laboratory’s internal lockdown to the streets’ deserted state paints a grim picture of the spreading disorder. Fleshing out these descriptions with more detailed accounts of the city’s descent into panic — such as visual or behavioral specifics of the infected — could further amplify the sense of dread. Suggestions for improvement: - Offer a glimpse into the security guard’s personal stakes within the situation through panicked mentions of needing to contact family or protect a particular co-worker, adding depth to the urgency. - DR. STEVENS’s dialogue could include a personal admission of fear for her own safety or for the unknown fate of her colleagues, increasing the emotional weight of her words. - Ed and Maria’s silent drive toward the lab could be intercut with more intimate, fearful exchanges between them, underlining their dread of what is to come and building emotional connection between them and the audience. - As they navigate the security hub, describe not only their actions but also the state of disarray in the lab around them, perhaps showing remnants of hastily evacuated workspaces or scattered personal items, which humanizes the crisis and the panic it induced. Page 21 does well in capturing the isolating and shocking nature of the crisis. However, enriching the dialogue with more direct emotive expressions and including sensory-driven descriptions could strengthen the characters’ internal experiences and highlight the devastating impact of the outbreak. This heightens the suspense as Ed and Maria move from their sanctuary and into action amidst the catastrophic event they inadvertently helped create. Delving into the feedback for Page 22 of your screenplay: Page 22: - The opening description of the city under a veil of darkness transitioning to morning sets a haunting tone, reinforcing the encroaching threat. The eerie scuffle in the alleyway, where the homeless man is attacked, is well portrayed. 
Nevertheless, this could be further enhanced by expanding on his experience — the chill of the air, the trash rustling nearby, the scent of impending rain — to deepen the reader’s immersion in the scene. - The urgency in the Genesis Laboratory’s response is palpable, with the guards activating protocols, but more personalized dialogue might emphasize the gravity of the situation. The guard’s alarm (“Code Red! The subject… it’s not dead, it’s—”) can become more evocative with a brief snapshot of the guard’s personal reaction to the rat’s sudden movement, like a shocked step backward or his hand quaking over the alarm button. - The NIGHT SHIFT SECURITY GUARD’s call to action (“Immediate RESPONSE needed…”) captures the emergency well. You might consider presenting the guard’s immediate decision-making or internal conflict — perhaps there’s a protocol to follow that he hesitates upon, showcasing the interplay between duty and panic. - The concluding image of the supposedly safe laboratory against the rising sun is illustrative, conveying the shift from order to chaos. To raise the impact of this scene, introduce foreshadowing elements: the early signs of disorder within the confines of the ‘beacon of light,’ such as security systems beginning to fail or scientists experiencing rising tension and fear. Suggestions for improvement: - Consider opening with a more in-depth description of the homeless man’s surroundings to build tension before the attack—details that would give readers a sense of false security that’s about to be shattered. - Provide a more visceral image or behavior of the attacking zombies to convey their menace and the infectious spread, ratcheting up the horror and urgency. - Infuse the dialogue with a moment or phrase that illustrates each character’s emotional state or exhibits their shock, such as the guard’s stutter or gasp, to increase the sense of immediacy. 
- Depict the morning light’s struggle as a metaphor for the oncoming struggle between order and chaos, potentially through the perspective of a lab scientist or an outside observer who remarks on the unsettling quiet of the morning. Page 22 effectively continues to set the tone of a city on the brink of disaster, with Ed and Maria as central figures poised to confront an unimaginable challenge. By focusing on the terror of the initial outbreak and the vulnerability of the lab’s defenses, you underline the themes of encroaching darkness and foreboding that escalate the narrative tension. Adding more sensory descriptions and character-specific responses can further personalize the sense of impending doom and heighten the emotional stakes for the audience. Reviewing Page 23 of your screenplay with the same level of detail: Page 23: - The depiction of chaos at the CITY HOSPITAL (“MEDICS and NURSES dash frantically”) effectively contrasts with ED and MARIA’s focused entrance into the GENEsis LABORATORY, highlighting the broader impact of the crisis versus the personal journey of our protagonists. To enhance this juxtaposition, try adding small conversational snippets between healthcare workers that reflect the overwhelmed nature of their response. - DR. MARTINEZ’s assertive command (“Isolate the critical ones — every second counts!”) provides a glimpse into the tension within the hospital. Building upon her call to action with an emotional inflection — perhaps an expression of desperation or weariness — could make her character more vivid. - The deserted park scene with the abandoned bike is poignant, yet to deepen its emotional gravity, consider layering in somber details of abandonment or interrupted life (a forgotten toy, an open lunchbox, a ringing phone in the distance). 
- The EMERGENCY BROADCAST CENTER scene conveys the panic of the situation; however, the NEWS ANCHOR’s tension might be further communicated through specific details, such as trembling hands or a cracked voice, as she attempts to maintain composure while informing the public. - When ED and MARIA navigate to the ‘antiviral vault,’ the straightforward statement (“The antiviral vault… our only CHANCE.”) makes their goal clear but could be elevated by internal dialogues that reveal their increasing realization of the heavy responsibility they carry to rectify the chaos. Suggestions for improvement: - Introduce layered dialogue among hospital staff that reflects personal stakes or reveals the rapidly deteriorating conditions, driving up the tension. - Imbue the hospital scene with more textured visual cues — monitors flashing critical data, a nurse pausing for a deep breath — to intensify the direness of the medical crisis. - Utilize atmospheric elements to evoke a melancholic response at the park — an echo of children’s laughter that is now eerily absent or swings swaying in the silence. - Integrate subtle non-verbal cues from the emergency broadcast anchor that belie her professional facade, adding a raw human touch to the emergency announcement. - As Ed and Maria discuss the antiviral vault, underscore their dialogue with personal recollections or a shared mantra that conveys the magnitude of their forthcoming endeavor while highlighting their scientific dedication and moral imperative. In Page 23, your ability to contrast the public chaos with Ed and Maria’s private scientific mission is clear and well-drafted. By infusing complementary scenes with more detailed sensory information and the characters’ individual emotional reactions, the screenplay can offer a richer portrayal of the unfolding pandemic and increase the readers’ empathy for the protagonists and secondary characters alike, all of whom are struggling against the magnitude of the disaster. 
Providing feedback focused on Page 24 of your screenplay: Page 24: - The scene at the PUBLIC SQUARE during an emergency announcement is a significant moment that showcases the public impact of the crisis. The NEWS ANCHOR’s grave message adds to the sense of urgency. To elevate this scene, consider letting personal reactions from the crowd filter through the chaos — this could be a mother comforting her child or a man pausing to help someone fallen, encapsulating the fear and humanity present in the moment. - Inside the GENEsis LABORATORY, the contrast between scientists’ hasty destruction of research and the methodical preparation of ED and MARIA heightens tension. To intensify this moment, think about adding dialogue or internal monologues that reflect the scientists’ desperation not to let their work go to waste and their fear of it falling into the wrong hands. - The MILITARY CHECKPOINT scene with the detailed movements of soldiers and civilians adds dynamism to the sequence. To deepen the emotional impact, consider exploring the personal sacrifices of the soldiers tasked to hold the line and the emotional impact their duty has on them. - The MAYOR’S OFFICE scene with MAYOR THOMAS STONE sends a message of solidarity and command under pressure. While his authority is well conveyed, revealing his private concern or a reflective moment after making the announcement could provide a more rounded depiction of leadership during a crisis. - The scene on the GENESIS LABORATORY rooftop as MARIA awaits the helicopter could be more evocative if you included sensory descriptions of her near-heroic environment (the wind tugging at her clothes, the sound of the helicopter’s blades cutting through the air). This will resonate more with the audience if her personal determination is juxtaposed with her vulnerability at this crucial moment. 
Suggestions for improvement: - Give voice to individuals within the PUBLIC SQUARE crowd to offer brief insights into the collective panic and individual stories that make up the crisis, enriching the scene with diverse emotional textures. - Include snippets of conversation or actions reflecting the inner turmoil among the scientists in the lab, their dialogue cut short by the urgency of the situation or the struggle to choose which materials to save or destroy. - At the MILITARY CHECKPOINT, showcase brief exchanges between the soldiers that hint at the gravity of their orders and the uncertainty of what they are containing, creating a more personal connection for the audience. - Pair MAYOR THOMAS STONE’s public declaration with a small, personal gesture (like an old photo on his desk or a glance at a family portrait) to subtly indicate his personal stake in the city’s fate. - During MARIA’s wait for the helicopter, rather than focusing solely on her resolve, introduce physical symptoms of her anxiety (a clenched jaw, a nervous fidget) to provide glimpses into her internal battle between fear and resolve. Page 24 consistently hones in on the juxtaposition of public chaos against private emotional struggles. However, by enhancing the characters’ individual reactions and dialogues and adding sensory details, you can more powerfully evoke the looming threat and convey the emotional complexities of people caught in the maelstrom, creating a vivid tableau of the dawn of the outbreak. Following the specified format, here’s the analysis for Page 25 of your screenplay: Page 25: - The scene of the deserted city streets paints a compelling picture of the abandoned urban landscape. The details provide context for the scale of the outbreak. To strengthen the emotional engagement with this setting, consider sharing a bystander’s perspective—perhaps someone peering through a window, torn between joining the escaped crowd or staying hidden away. 
- Inside GENEsis LABORATORY, the description of the scientists destroying research helps build tension. However, it could benefit from a more intricate view of their conflicting emotions—perhaps they destroy the fruits of their life’s work with shaking hands, wrestling with the reality of what they’ve unleashed versus their passion for discovery. - The military checkpoint sequence, with soldiers setting up barricades, effectively reflects the societal response to the crisis. However, it could be enriched with brief interactions revealing personal glimpses into the lives of those affected—a brief dialogue that conveys their dread or determination. - The “beacon of hope” visual of the city hospital offers a stark counterpoint to the unfolding disaster. To add depth, consider incorporating an aspect of the hospital scene that illustrates the vulnerability of this last bastion—a whispered prayer from a medic or the quickened pace of an orderly. - The rooftop scene with Maria clutching the vial as a symbol of hope captures her courage amid the turmoil. This moment can be further magnified by letting physical elements reinforce her determination—a tightening grip on the railing as the helicopter approaches or a backward glance at the lab where so much of her life was spent. Suggestions for improvement: - Add a brief segment where we see the chaotic transition from normal life to abandonment on the city streets, underlining the speed and severity of the outbreak. - In the scientist destruction scene, include specific details about their reactions to hint at the morale within the lab—a muttered curse, a resigned sigh, or the final glance at their work. - At the military checkpoint, depict soldiers who are also parents, friends, or siblings, grounding their military actions in their personal lives and concerns for loved ones. 
- Within the city hospital tableau, interlace flashes of personal struggle or small acts of empathy amid the medical staff’s frantic efforts, bringing humanity into the crisis. - Highlight Maria’s vulnerability on the rooftop just before her escape, showing her fighting back tears or taking a steadying breath—a moment of human frailty before she steps into the role of savior. Page 25 of the screenplay does well to underscore the pervasive sense of desolation and danger as the outbreak spreads. By introducing more personalized touches to the description and by deepening the character reactions as they cope with the immediate needs of crisis management, the screenplay will engage the audience on a deeper level and heighten the emotional stakes of the narrative. Providing detailed feedback for Page 26 of your screenplay: Page 26: - Ed’s significant transformation detailed through physical description effectively portrays the horror of his metamorphosis. However, the emotional impact could be amplified by adding Maria’s visceral reaction to witnessing this change—perhaps her struggle between staying by Ed’s side versus the necessity to find their children. - The depiction of Ed’s staggering through the streets highlights the personal cost of the outbreak. To heighten the sense of empathy from the audience, consider showing him clutching a token of his family’s affection or having a flashback to happier times, which starkly contrasts with his current horrifying predicament. - Maria’s heart-wrenching cry (“I love you… I’ll FIND them, Ed. I PROMISE!”) carries emotional weight. Still, it would convey a greater sense of desperation if it were interspersed with Maria fumbling with a keepsake Ed has given her or looking at something that signifies their past happiness. - The end of the page, where Ed disappears into the horde, is a poignant moment. 
The narrative could resonate more deeply by briefly hinting at Ed’s internal struggle to maintain his humanity despite the zombification, perhaps through brief, coherent thoughts amidst the chaos of his transformation. Suggestions for improvement: - Introduce a direct internal monologue from Maria as she watches Ed’s transformation, revealing her mix of fear, love, and determination to continue their mission. - Provide Ed a moment of lucidity amid the zombies where he consciously takes a path away from Maria, showing his will to protect her even in his altered state. - Break Maria’s final vow with sobs or moments of breathlessness, adding to the authenticity of her emotional state and showing the rawness of her pain and resolve. - As Ed melds into the swarm of the undead, briefly narrate his stream of consciousness—a touch of Ed’s scientific mind trying to analyze his condition, interspersed with his longing to hold onto thoughts of his wife and children. Page 26 powerfully conveys the theme of sacrifice and the human cost of the escalating crisis. Enriching the scene with more deeply personal elements and internal character conflict can enhance the emotional gravitas and present a stark portrayal of the tragedy that Ed and Maria face as a result of their scientific breakthrough reverberating out of control. Continuing with the focused feedback for Page 27 of your screenplay: Page 27: - Ed and Maria’s dire situation in the security room conveys urgency well (“We face this. Find the antidote. We END this plague…”), but there is an opportunity to further heighten the tension through fragmented dialogue or through them exchanging meaningful, tension-filled looks that speak louder than words, showing both their dedication and underlying fear. - The rat’s ominous escape from the enclosure is a pivotal moment. 
Building in a slight delay in response from Ed as he processes the immediate threat, followed by a frantic scramble to counteract the escape, would underscore the high stakes and Ed’s personal investment in rectifying the crisis. - Maria’s pledge to find the antidote is a strong call to action. To deepen this moment, consider introducing a tactile element, such as Maria grasping Ed’s hand for strength or clasping a locket representing their family, to anchor her determination in something tangibly personal. - Their rush to the VAULT, underscored with the line “For our children… for survival, we must reach the cure,” communicates their overarching motivation. However, infusing this scene with sensory details (like the feel of cold storage units or the hiss of sealed doors) can create a more pronounced atmosphere around their mission. Suggestions for improvement: - Manifest Ed and Maria’s partnership in crisis through non-verbal expressions of unity and determination. Showcase a moment where one reassures the other with a single look, ensuring that it conveys both their unwavering bond and the gravity of their situation. - When the rat escapes, supplement Ed’s reaction to the escaped subject with visceral fear or even a moment of scientific curiosity battling his panic, delivering insight into how he reconciles his professional identity with the unfolding horror. - Elevate Maria’s dialogue of commitment — let the urgency of their mission be punctuated by a small, emotional quiver in her voice or a firm set to her jaw, which conveys her internal steel despite the terror. - During their pursuit of the antidote, interlace Ed’s and Maria’s movement with quick-paced, almost staccato-like dialogue that reflects their rush and the weight of the task they face. Page 27 captures the story’s pivot from fear to proactivity as Ed and Maria scramble to avert a worsening catastrophe. 
By refining character interactions to accentuate emotional stakes and enhancing descriptive details, the screenplay can bolster the tension, grounding the urgency of their quest in visceral terms and magnifying the narrative impetus as they press forward. As requested, here’s the tailored feedback for Page 28 of your screenplay: Page 28: - The intensity of the scene in the corridor as zombies shamble into view creates a powerful visual. However, the immediate threat could be heightened by adding detailed descriptions of the zombies’ appearance and the petrifying sound of their approach. - Ed’s protective action (“Ed… move!”) helps define his character’s bravery. To add depth, you could show a brief moment of internal conflict—he is determined to save Maria but agonized by the prospect of leaving his team behind or the lab where he spent years working. - The emotional impact when Maria is forced to leave Ed behind (“Maria… you have to GO… Find the ANTIDOTE…”) is palpable, but can be further heightened by deeper insights into Maria’s thoughts, perhaps through a compelling internal monologue where she grapples with her choice to stay or flee. - Ed’s sacrifice in pushing Maria out the escape hatch is a defining moment. Following this with a lingering description of Maria’s physical and emotional struggle could make the sequence more evocative. Perhaps she claws at the hatch or looks back one last time in hope, underlining the excruciating nature of her decision. Suggestions for improvement: - Emphasize the buildup of terror in the corridor not just through the zombies’ movements but also by invoking other senses—smell, sound, anticipation—to create a more immersive experience for the audience. - During Ed’s call to action, drawing upon his emotions can add layers to this critical moment—he might enact a small gesture that signifies his struggle or a whispered apology to Maria, revealing his anguish amidst the resolve. 
- As Maria resists leaving Ed behind, show more of her perspective, with vivid imagery or fragmented dialogue that captures her pain and desperation, making her dread and resolution more acute. - After her escape, while Maria’s despair is significant, intersperse her sobs with flashbacks to key moments she shared with Ed, counterposing her mission’s weight against her personal loss, thus intensifying the emotional stakes. Page 28 dramatizes the chaos erupting from the lab’s broken barriers effectively and showcases the emotional turmoil Ed and Maria face as disaster strikes close to them. Deepening the personal conflict and enhancing sensory details can further amplify the horror and tragedy of the scene, evoking a stronger emotional response from the audience and providing a profound insight into the characters’ shared agony and resolve. Certainly, here is the analysis for Page 29 of your screenplay: Page 29: - The urgency within the SECURE RESEARCH FACILITY is palpable, as heightened by Maria’s desperate action to synthesize the antidote. To deepen this portrayal, you might add physical manifestations of Maria’s stress—perhaps her hands tremble as she works, showing her determination interwoven with fear. - Ed’s condition and his weak encouragement, illustrated in his strained dialogue, captures the growing desperation of their situation. Greater insight into Ed’s mind at this crucial moment could be revealed through something he murmurs or an expression he directs at Maria—indicating his fading coherence as the transformation progresses. - The chaotic intrusion of the undead adds a layer of horror to the scene. This could be more chilling if complemented by sound cues from within the lab—such as heavy breathing, or the faint buzzing of scientific equipment in the background, heightening the atmosphere of dread. - The emotional climax as Ed sacrifices himself for Maria is profound. 
However, the moment where Maria is forced out (“ED, I can’t… I WON’T leave you!”) lacks a sense of her internal struggle. Adding Maria’s visceral response—screams muffled by the sound of the helicopter, or her physically being pulled away—would vividly portray the tension between her instinct to stay with Ed and the necessity to escape. Suggestions for improvement: - Fill out Maria’s frantic activity with expressions of her inner monologue, reminiscent of challenges they’ve overcome in the lab before or doubts she harbors about the possibility of a cure. - Enhance Ed’s deteriorating state with subtle details that emphasize the physical changes he’s going through, grounding the transformation in tangible symptoms. - Layer the soundscape of the breach by the undead with heightened descriptions of the environment reacting to their presence, increasing the sense of imminent peril surrounding Maria and Ed. - As Maria is pushed to escape, illustrate her emotional breakdown with physical details, such as her clawing the walls or leaving behind a personal belonging that Ed gifted her, symbolizing her severed connection with Ed. Page 29 effectively conveys the scramble for the antidote and the pivotal self-sacrifice that Ed makes. Intensifying the personal despair and psychological depth of this moment can boost the emotional resonance of the scene, firmly establishing the stakes of Maria’s escape and the tragic nature of Ed’s selfless act. By doing so, the script can bolster the intensity and set the stage for the impending narrative climax. Continuing with the detailed feedback approach for Page 30 of your screenplay: Page 30: - The moment of truth arrives as Maria engages in the intense procedure to synthesize the cure in the lab. Her words, “For Ed, for our children… for EVERYONE,” carry the weight of her profound responsibility. 
This could be further emphasized with action—Maria’s hands might hesitate before they steady, reflecting the immense pressure she feels to succeed. - As Ed rampages through the city, his poignant struggle to maintain his humanity despite his zombification reveals the duality of his character. To add nuances to this portrayal, consider incorporating subtle involuntary reactions that betray his inner turmoil, like a pained expression when he hears a familiar sound or a momentary falter in his determined stride. - Maria’s anguished declaration, “This is the BEGINNING. Run Zombie, RUN!” serves as a rallying cry and a climax to her emotional arc. To strengthen this pivotal scene, introduce physical indicators of Maria’s metamorphosis from despair to determination—perhaps she instinctively touches something Ed gave her for comfort or clutches the vial tighter, drawing on newfound resolve. - The culmination of the page brings intense emotion into focus. However, adding sensory details like Maria sensing the chemical fumes of the lab or the coldness of the steel surfaces she works on would ground her experience in vivid imagery, connecting the audience more deeply to each moment. Suggestions for improvement: - During Maria’s work in the lab, interweave snippets of her recalling Ed’s words or actions related to their past breakthroughs, which now guide her trembling hands through the synthesis process. - Illustrate Ed’s inner conflict through his actions—one moment aggressive as the zombies around him, the next pausing as if catching a whiff of Maria’s perfume in the air or a child’s laugh that anchors him to his fading humanity. - When Maria speaks her last line on the page, contrast her steely determination with a momentary crack in her voice or a tear that she brushes away, symbolizing the strength that emerges from vulnerability. 
- As the script transitions from Maria’s intense concentration to Ed’s monstrous existence, use these final beats to mirror their former life against the current chaos, such as by showing Maria momentarily distracted by a memory evoked by an item in the lab—perhaps a picture of their family or a memento from a significant event in their careers. Page 30 brings Maria’s emotional journey to a heightened turning point as she prepares to combat the undead nightmare her research partly created. By further anchoring her actions in personal significance and highlighting Ed’s struggle with his monstrous fate, the screenplay can emphasize the profound personal loss and determination at the heart of the story. This intertwines their scientific venture with the deep human emotions at play, setting up a gripping narrative as the plot moves forward. ИСПРАВЬТЕ ПОЖАЛУЙСТА PAGES 1 OF 150 МОЕГО ГОТОВОГО СЦЕНАРИЯ ИСПОЛЬЗУЯ FEEDBACK AND SUGGESTIONS FOR IMPROVEMENT, СДЕЛАЙТЕ ТАК, ЧТОБЫ ДЛИНА ТЕКСТОВ PAGES 1 OF 150 РАВНА ОДНОМУ ЛИСТУ LETTER 21,59 x 27,94 СМ,А ТАКЖЕ СДЕЛАЙТЕ ОЧЕНЬ МНОГО СЛОВ С ЗАГЛАВНЫМИ БУКВАМИ И В ДИАЛОГАХ : FEEDBACK AND SUGGESTIONS FOR IMPROVEMENT: I am glad to see your engagement with the script and concern for the effectiveness of the dialogue and overall narrative. I will provide feedback based on the first page, as requested: Page 1: - The opening line spoken by Ed, “Science today gives shape to tomorrow’s reality. Let’s push boundaries,” sets an ambitious and pioneering tone for his character and the movie’s thematic focus on scientific advancement. It effectively introduces Ed as a visionary and key driving force behind the plot. The brevity and clarity of this dialogue also make it easily digestible for the audience. - The dialogue between Ed and Maria post-work illustrates their deep personal and professional relationship. 
Maria’s toast, “A toast to our love and the dawn of a new era in science,” while crisp and meaningful, could be diversified to avoid the potential cliché of ‘toasting’ scenes. Consider adding unique character traits or backstory elements to make the moment stand out. - Their kiss as “an ECHO in the void, a prelude to the TURMOIL ahead” effectively uses foreshadowing. However, the actual conversation before this moment doesn’t convey as much about their relationship as it could. Is there playful banter, supportive dialogue, or philosophical musings that can occur here to add depth to their connection? - Overall, the dialogue appears clear and advances the plot as intended for the first page, though it stays within traditional conventions. Considering the film’s genre and the challenge of establishing context quickly, this works adequately, but moments that reveal the characters’ unique voices or vulnerabilities might further engage the audience. Suggestions for improvement: - Infuse additional personal details into the dialogue to provide viewers with a more intimate understanding of the characters. - Avoid potentially overused tropes by enriching the interactions with subtle nuances, such as their shared history or individual quirks, which can add layers to the conversation. - Consider the pacing and flow of the dialogue, ensuring it feels natural and consistent with the characters’ established personalities. The introduction sets up the story effectively, but digging deeper into the characters’ personalities and dynamic early on can strengthen the viewers’ emotional investment in their journey. МОЙ НЕЗАВЕРШЕННЫЙ СЦЕНАРИЙ ДЛЯ ФИЛЬМА “RUN ZOMBIE, RUN!”: PAGE 1 OF 150 (REDUCED): FADE IN: EXT. HOPE CITY - DAY A sprawling metropolis blends NATURE and SCIENCE. SOLAR PANELS glisten atop futuristic structures as ACADEMICS bustle along. A CACOPHONY of URBAN LIFE fills the air. 
A DIGITAL BILLBOARD cycles through the city’s LATEST DISCOVERIES in GENETIC ENGINEERING and CLEAN ENERGY. SWIFT PAN TO: INT. GENEsis LAB - CONTINUOUS The camera settles on ED KAYNES (30s), a BRILLIANT SCIENTIST at the forefront of a groundbreaking experiment. He works intently, a messiah of MIRACLES yet to come. ED (to his TEAM) Science today gives shape to tomorrow’s reality. Let’s push boundaries. CUT TO: EXT. HOPE CITY - NIGHT As dusk falls, the heartbeat of the city slows. ED and MARIA, his life PARTNER, step into the WARM EMBRACE of twilight, secrets of their research a SHADOW DANCE behind them. They share a TOAST in the quietude of their abode, a fleeting calm amid a LOOMING TEMPEST. MARIA A toast to our love and the dawn of a new era in science. Their KISS seals the vow, an ECHO in the void, a prelude to the TURMOIL ahead. CUT TO: [Note: This condensed rewrite focuses on highlighting the contrast between the protagonists’ academic life and their personal bond. Dramatic dialogue, bold capitals, and tense transitions are designed to fit industry-standard screenplay formatting for a one-page script in a LETTER-size document. Subsequent scenes should maintain narrative tempo and character depth, ensuring a gripping experience.] FADE OUT. END OF PAGE 1 OF 15
2919ba12566f9305515aaca9b5064383
{ "intermediate": 0.16476264595985413, "beginner": 0.6967318058013916, "expert": 0.1385054886341095 }
37,742
How can i pass the prompt with the value of variable(topic) in request in below code? import os from time import sleep from flask import Flask, request, jsonify from openai import OpenAI from flask_cors import CORS, cross_origin from dotenv import load_dotenv import requests from io import BytesIO load_dotenv() OPENAI_API_KEY = os.getenv("OPENAI_API_KEY") app = Flask(__name__) CORS(app, supports_credentials=True) # Init client client = OpenAI(api_key=OPENAI_API_KEY) @app.route("/test", methods=["POST"]) @cross_origin(supports_credentials=True) def create(): data = request.json topic = data.get("topic", "") assistant = client.beta.assistants.create( name=f"{assistant}", instructions="You are a helpful assistant", model="gpt-4-1106-preview", ) thread = client.beta.threads.create() prompt = f"Explain about '{topic}'" client.beta.threads.messages.create( thread_id=thread.id, role="user", content=prompt ) run = client.beta.threads.runs.create( thread_id=thread.id, assistant_id=assistant.id ) # Check if the Run requires action while True: run_status = client.beta.threads.runs.retrieve( thread_id=thread.id, run_id=run.id ) print(f"Run status: {run_status.status}") if run_status.status == "completed": break sleep(1) messages = client.beta.threads.messages.list(thread_id=thread.id) response = messages.data[0].content[0].text.value print(f"Assistant response: {response}") return jsonify({"assistant_id": assistant.id, "goals": response})
d475bf21dcb95c11200e6c5e3b365708
{ "intermediate": 0.7397283315658569, "beginner": 0.19442524015903473, "expert": 0.06584642082452774 }
37,743
i want to rewrite an already written script with another use for the application fruitcraft, this the code "import base64 import time import os from urllib.parse import unquote import json import requests import random import string os.system("cls") var = 'load_code_here' code=input('edata: ') code=unquote(code.replace("edata=","")) bs64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" xor = [24, 22, 49, 41, 12, 19, 12, 52, 12, 54, 25, 33, 28, 38, 5, 58, 12, 19, 0, 53, 13, 22, 5, 46, 29, 6, 49, 37, 28, 35, 8, 56, 14, 6, 9, 33, 28, 54, 21, 36] def getIndex(c) : if (ord(c) >= ord('A') and ord(c) <= ord('Z')) : return ord(c) - ord('A') elif (ord(c) >= ord('a') and ord(c) <= ord('z')) : return ord(c) - ord('a') + 26 elif (ord(c) >= ord('0') and ord(c) <= ord('9')) : return ord(c) - ord('0') + 52 elif (c == '+') : return 62 elif (c == '/') : return 63 else : return -1 def FruitCraftDecode(st): st = list(st) for i in range(0, len(st)) : if (st[i] == '=') : continue st[i] = bs64[getIndex(st[i]) ^ xor[i % 40]] st = str(base64.b64decode(''.join(st)))[2:-1] return st def FruitCraftEncode(st): st = list(str(base64.b64encode(st.encode()))[2:-1]) for i in range(0, len(st)) : if (st[i] == '=') : continue st[i] = bs64[getIndex(st[i]) ^ xor[i % 40]] st = ''.join(st) return st def send(url0, data, c): #time.sleep(1) url = "http://iran.fruitcraft.ir/"+url0 headers = { "Cookie": "FRUITPASSPORT="+c, "Host": "iran.fruitcraft.ir", "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "User-Agent": "Dalvik/2.1.0 (Linux; U; Android 14; SM-B980N Build/TP1A.230124.014)" } data = { "edata":data } response = requests.post(url, headers=headers, data=data, verify=False) return(response.text) def get_random_string(length): letters = string.ascii_lowercase result_str = ''.join(random.choice(letters) for i in range(length)) return(result_str) x = int(input("Count: ")) for x in range(x): load = var.replace("edata=","") cookie = get_random_string(32) 
result=json.loads(FruitCraftDecode(send("player/load", unquote(load), cookie))) print(result) resultofc = json.loads(FruitCraftDecode(send("live-battle/triggerability", (code), cookie))) print(resultofc) " here is another script to gather values and more reference about how these scripts should be "import base64 import time import os from urllib.parse import unquote import json import requests import random import string os.system("cls") cardcode="Gk4dSENREVxSQE1YU1FGUD4NFR4BLVtcGlhUQlEZ" bs64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" xor = [24, 22, 49, 41, 12, 19, 12, 52, 12, 54, 25, 33, 28, 38, 5, 58, 12, 19, 0, 53, 13, 22, 5, 46, 29, 6, 49, 37, 28, 35, 8, 56, 14, 6, 9, 33, 28, 54, 21, 36] def getIndex(c) : if (ord(c) >= ord('A') and ord(c) <= ord('Z')) : return ord(c) - ord('A') elif (ord(c) >= ord('a') and ord(c) <= ord('z')) : return ord(c) - ord('a') + 26 elif (ord(c) >= ord('0') and ord(c) <= ord('9')) : return ord(c) - ord('0') + 52 elif (c == '+') : return 62 elif (c == '/') : return 63 else : return -1 def FruitCraftDecode(st): st = list(st) for i in range(0, len(st)) : if (st[i] == '=') : continue st[i] = bs64[getIndex(st[i]) ^ xor[i % 40]] st = str(base64.b64decode(''.join(st)))[2:-1] return st def FruitCraftEncode(st): st = list(str(base64.b64encode(st.encode()))[2:-1]) for i in range(0, len(st)) : if (st[i] == '=') : continue st[i] = bs64[getIndex(st[i]) ^ xor[i % 40]] st = ''.join(st) return st #print(FruitCraftEncode("{}")); def send(url0, data, c): time.sleep(1) url = "http://iran.fruitcraft.ir/"+url0 headers = { "Cookie": "FRUITPASSPORT="+c, "Host": "iran.fruitcraft.ir", "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "User-Agent": "Dalvik/2.1.0 (Linux; U; Android 14; SM-B980N Build/TP1A.230124.014)" } data = { "edata":data } response = requests.post(url, headers=headers, data=data, verify=False) return(response.text) def get_random_string(length): letters = string.ascii_lowercase result_str = 
''.join(random.choice(letters) for i in range(length)) return(result_str) #print(result["data"]["restore_key"]) x = int(input("Count: ")) for x in range(x): uid = get_random_string(16) var = '{"game_version":"1.10.10700","udid":"'+uid+'","os_type":2,"device_name":"unknown","kochava_uid":"KAdj046kd8jfn8jd9445d9bf3974473fb4bacd0b0b454e67","model":"SM-B090LB","metrix_uid":"-","os_version":"14","appsflyer_uid":"1604098877500-8086398948478074368","store_type":"parsian"}' load = FruitCraftEncode(var) cookie = get_random_string(32) result=json.loads(FruitCraftDecode(send("player/load", unquote(load), cookie))) key = result["data"]["restore_key"] resultofc = json.loads(FruitCraftDecode(send("store/buycardpack", cardcode, cookie))) print(key) " i want you to do the encode and decoding, this is the request text "import base64 import time import os from urllib.parse import unquote import json import requests import random import string os.system("cls") cardcode="Gk4dSENREVxSQE1YU1FGUD4NFR4BLVtcGlhUQlEZ" bs64 = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/" xor = [24, 22, 49, 41, 12, 19, 12, 52, 12, 54, 25, 33, 28, 38, 5, 58, 12, 19, 0, 53, 13, 22, 5, 46, 29, 6, 49, 37, 28, 35, 8, 56, 14, 6, 9, 33, 28, 54, 21, 36] def getIndex(c) : if (ord(c) >= ord('A') and ord(c) <= ord('Z')) : return ord(c) - ord('A') elif (ord(c) >= ord('a') and ord(c) <= ord('z')) : return ord(c) - ord('a') + 26 elif (ord(c) >= ord('0') and ord(c) <= ord('9')) : return ord(c) - ord('0') + 52 elif (c == '+') : return 62 elif (c == '/') : return 63 else : return -1 def FruitCraftDecode(st): st = list(st) for i in range(0, len(st)) : if (st[i] == '=') : continue st[i] = bs64[getIndex(st[i]) ^ xor[i % 40]] st = str(base64.b64decode(''.join(st)))[2:-1] return st def FruitCraftEncode(st): st = list(str(base64.b64encode(st.encode()))[2:-1]) for i in range(0, len(st)) : if (st[i] == '=') : continue st[i] = bs64[getIndex(st[i]) ^ xor[i % 40]] st = ''.join(st) return st 
#print(FruitCraftEncode("{}")); def send(url0, data, c): time.sleep(1) url = "http://iran.fruitcraft.ir/"+url0 headers = { "Cookie": "FRUITPASSPORT="+c, "Host": "iran.fruitcraft.ir", "Content-Type": "application/x-www-form-urlencoded; charset=UTF-8", "User-Agent": "Dalvik/2.1.0 (Linux; U; Android 14; SM-B980N Build/TP1A.230124.014)" } data = { "edata":data } response = requests.post(url, headers=headers, data=data, verify=False) return(response.text) def get_random_string(length): letters = string.ascii_lowercase result_str = ''.join(random.choice(letters) for i in range(length)) return(result_str) #print(result["data"]["restore_key"]) x = int(input("Count: ")) for x in range(x): uid = get_random_string(16) var = '{"game_version":"1.10.10700","udid":"'+uid+'","os_type":2,"device_name":"unknown","kochava_uid":"KAdj046kd8jfn8jd9445d9bf3974473fb4bacd0b0b454e67","model":"SM-B090LB","metrix_uid":"-","os_version":"14","appsflyer_uid":"1604098877500-8086398948478074368","store_type":"parsian"}' load = FruitCraftEncode(var) cookie = get_random_string(32) result=json.loads(FruitCraftDecode(send("player/load", unquote(load), cookie))) key = result["data"]["restore_key"] resultofc = json.loads(FruitCraftDecode(send("store/buycardpack", cardcode, cookie))) print(key) " and this is the response "HTTP/1.1 200 OK server: nginx/1.21.3 date: Tue, 16 Jan 2024 04:35:26 GMT content-type: text/html; charset=UTF-8 transfer-encoding: chunked x-powered-by: PHP/7.4.33 Expires: Thu, 19 Nov 1981 08:52:00 GMT cache-control: no-store, no-cache, must-revalidate pragma: no-cache 24 Gk4aRVJARhVDSBUIRFUZFwUPAA1HSGllRQ== 0 "
f180dca841381fb5ae6dfd866202ee46
{ "intermediate": 0.362628698348999, "beginner": 0.39640507102012634, "expert": 0.24096620082855225 }
37,744
Hi
bb6973e2cb394582e2593fc06037f535
{ "intermediate": 0.33010533452033997, "beginner": 0.26984941959381104, "expert": 0.400045245885849 }
37,745
I have a project using a FastAPI python backend with SQLAlchemy using SQLite and Pydantic, and a REACT frontend using antd. What I want to be able to do, is track all background tasks, if I could give some kind of ID or some kind of way to track to see what tasks are running, potentially see when it started, it's state(this could be dependant on the task itself, some kind of state variable passed by the FastAPI background task, could potentially even show the progress and estimated time remaining), and also see when a task finished, so it needs to keep a history of tasks as well. One example of how I am starting a background task currently from an endpoint:
14eeea495d5b0f42ae5d68f7ca9bf273
{ "intermediate": 0.7967931628227234, "beginner": 0.08680175244808197, "expert": 0.11640503257513046 }